Incus instances and service freeze

I am running into a recurring issue with the Incus service. When it happens, Incus stops responding to all CLI commands, and none of the instances, VMs or LXC containers alike, respond either. After a short period the service unfreezes and all instances come back online. I checked the uptime inside the VM afterwards, and it still reflects the original start time rather than the time of the freeze, so the instance does not appear to have been restarted. Here are the details from the logs.

# journalctl -u incus -n 100

Apr 16 02:19:59 truenas systemd[1]: Starting incus.service - Incus - Main daemon...
Apr 16 02:19:59 truenas incus[21314]: If this is your first time running Incus on this machine, you should also run: incus admin init
Apr 16 02:20:00 truenas systemd[1]: Started incus.service - Incus - Main daemon.
Apr 16 02:20:25 truenas systemd[1]: Stopping incus.service - Incus - Main daemon...
Apr 16 02:20:25 truenas systemd[1]: incus.service: Deactivated successfully.
Apr 16 02:20:25 truenas systemd[1]: Stopped incus.service - Incus - Main daemon.
Apr 16 02:20:25 truenas systemd[1]: incus.service: Consumed 1.259s CPU time.
Apr 16 02:20:25 truenas systemd[1]: Starting incus.service - Incus - Main daemon...
Apr 16 02:20:34 truenas systemd[1]: Started incus.service - Incus - Main daemon.
Apr 16 03:04:06 truenas incusd[22165]: time="2025-04-16T03:04:06-07:00" level=warning msg="Failed getting exec control websocket reader, killing command" PID=0 err="websocket: close 1006 (abnormal closure): unexpected EOF" instance=home-assista>
Apr 16 04:13:17 truenas incusd[22165]: time="2025-04-16T04:13:17-07:00" level=warning msg="Failed getting exec control websocket reader, killing command" PID=195724 err="websocket: close 1006 (abnormal closure): unexpected EOF" instance=docker >
Apr 16 04:43:41 truenas incusd[22165]: time="2025-04-16T04:43:41-07:00" level=warning msg="Failed getting exec control websocket reader, killing command" PID=231616 err="websocket: close 1006 (abnormal closure): unexpected EOF" instance=docker >
Apr 16 04:45:15 truenas incusd[22165]: time="2025-04-16T04:45:15-07:00" level=warning msg="Failed getting exec control websocket reader, killing command" PID=253094 err="websocket: close 1006 (abnormal closure): unexpected EOF" instance=docker >
Apr 16 04:59:18 truenas incusd[22165]: time="2025-04-16T04:59:18-07:00" level=warning msg="Failed getting exec control websocket reader, killing command" PID=258661 err="websocket: close 1006 (abnormal closure): unexpected EOF" instance=ubuntu->
Apr 22 05:04:07 truenas incusd[22165]: time="2025-04-22T05:04:07-07:00" level=warning msg="Failed getting exec control websocket reader, killing command" PID=589321 err="websocket: close 1006 (abnormal closure): unexpected EOF" instance=docker >
Apr 22 05:20:30 truenas incusd[22165]: time="2025-04-22T05:20:30-07:00" level=warning msg="Failed getting exec control websocket reader, killing command" PID=625969 err="websocket: close 1006 (abnormal closure): unexpected EOF" instance=docker >
Apr 29 12:38:17 truenas incusd[22165]: time="2025-04-29T12:38:17-07:00" level=warning msg="Could not get VM state from agent" err="dial unix /run/incus/home-assistant/qemu.monitor: connect: no such file or directory" instance=home-assistant ins>
Apr 29 12:38:17 truenas incusd[22165]: time="2025-04-29T12:38:17-07:00" level=warning msg="Failed getting host interface state for MTU" device=eth0 driver=nic err="route ip+net: invalid network interface name" host_name= instance=home-assistant>
May 06 06:12:32 truenas incusd[22165]: time="2025-05-06T06:12:32-07:00" level=error msg="Failed writing error for HTTP response" err="write unix /var/lib/incus/unix.socket->@: write: broken pipe" url=/1.0/instances writeErr="write unix /var/lib>
May 06 06:12:32 truenas incusd[22165]: time="2025-05-06T06:12:32-07:00" level=error msg="Failed writing error for HTTP response" err="write unix /var/lib/incus/unix.socket->@: write: broken pipe" url=/1.0/instances writeErr="write unix /var/lib>
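
The next time the daemon hangs I plan to capture some state from the host while it is still frozen. This is only a rough sketch (run as root; pidof and the /proc paths are plain Linux, nothing Incus-specific):

# Grab the daemon PID and see what its threads are blocked on
INCUS_PID=$(pidof incusd)
ps -L -o pid,tid,stat,wchan:32,cmd -p "$INCUS_PID"

# Kernel-side stack of the main thread, in case it is stuck in uninterruptible sleep
cat /proc/"$INCUS_PID"/stack

# Any blocked-task or storage errors around the time of the hang
dmesg | tail -n 50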

And here is the config of my home-assistant VM:

incus config show home-assistant
architecture: x86_64
config:
  boot.autostart: "false"
  limits.cpu: "2"
  limits.memory: 4096MiB
  raw.idmap: |-
    uid 568 568
    gid 568 568
  raw.qemu: -object secret,id=vnc0,file=/var/run/middleware/incus/passwords/home-assistant
    -vnc :0,password-secret=vnc0
  security.secureboot: "false"
  user.autostart: "true"
  user.ix_old_raw_qemu_config: -object secret,id=vnc0,file=/var/run/middleware/incus/passwords/home-assistant
    -vnc :0,password-secret=vnc0
  user.ix_vnc_config: '{"vnc_enabled": true, "vnc_port": 5900, "vnc_password": "secret"}'
  volatile.cloud-init.instance-id: ebd973ea-d31f-4b79-91f6-76d45bda7521
  volatile.eth0.host_name: tape7cfc662
  volatile.eth0.hwaddr: 00:16:3e:be:2c:fc
  volatile.last_state.power: RUNNING
  volatile.last_state.ready: "false"
  volatile.uuid: deb4b00c-46fc-4bdd-86a3-1aa537158cb8
  volatile.uuid.generation: deb4b00c-46fc-4bdd-86a3-1aa537158cb8
  volatile.vsock_id: "3387727789"
devices:
  eth0:
    nictype: bridged
    parent: br0
    type: nic
  haos:
    boot.priority: "1"
    io.bus: nvme
    pool: nvme
    source: haos
    type: disk
  root:
    io.bus: nvme
    path: /
    pool: nvme
    size: "21474836480"
    type: disk
  usb0:
    productid: 55d4
    type: usb
ephemeral: false
profiles:
- default
stateful: false
description: ""
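
Regarding the "Could not get VM state from agent" warning in the log above: once things recover, this is how I check whether the monitor socket it complains about is back, using the path taken straight from the log line:

incus info home-assistant
ls -l /run/incus/home-assistant/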

And here is the config of my docker LXC container:

incus config show docker
architecture: x86_64
config:
  boot.autostart: "false"
  image.architecture: amd64
  image.description: Debian bookworm amd64 (20250422_05:24)
  image.os: Debian
  image.release: bookworm
  image.serial: "20250422_05:24"
  image.type: squashfs
  image.variant: default
  raw.idmap: |-
    uid 568 568
    gid 568 568
  user.autostart: "true"
  volatile.base_image: e2cda91edd21d48cf55c0d0421e6ef99463f3e026338aeca698aa2f8e5ba7fd0
  volatile.cloud-init.instance-id: 4e382f11-0c27-4024-af93-0c2d8fa0e427
  volatile.eth0.hwaddr: 00:16:3e:55:8f:7e
  volatile.eth0.name: eth0
  volatile.idmap.base: "0"
  volatile.idmap.current: '[{"Isuid":true,"Isgid":false,"Hostid":2147000001,"Nsid":0,"Maprange":568},{"Isuid":true,"Isgid":false,"Hostid":568,"Nsid":568,"Maprange":1},{"Isuid":true,"Isgid":false,"Hostid":2147000570,"Nsid":569,"Maprange":458183},{"Isuid":false,"Isgid":true,"Hostid":2147000001,"Nsid":0,"Maprange":568},{"Isuid":false,"Isgid":true,"Hostid":568,"Nsid":568,"Maprange":1},{"Isuid":false,"Isgid":true,"Hostid":2147000570,"Nsid":569,"Maprange":458183}]'
  volatile.idmap.next: '[{"Isuid":true,"Isgid":false,"Hostid":2147000001,"Nsid":0,"Maprange":568},{"Isuid":true,"Isgid":false,"Hostid":568,"Nsid":568,"Maprange":1},{"Isuid":true,"Isgid":false,"Hostid":2147000570,"Nsid":569,"Maprange":458183},{"Isuid":false,"Isgid":true,"Hostid":2147000001,"Nsid":0,"Maprange":568},{"Isuid":false,"Isgid":true,"Hostid":568,"Nsid":568,"Maprange":1},{"Isuid":false,"Isgid":true,"Hostid":2147000570,"Nsid":569,"Maprange":458183}]'
  volatile.last_state.idmap: '[]'
  volatile.last_state.power: STOPPED
  volatile.last_state.ready: "false"
  volatile.uuid: b0df7594-d82d-4571-a58c-763b107c517f
  volatile.uuid.generation: b0df7594-d82d-4571-a58c-763b107c517f
devices:
  disk0:
    path: /mnt/nvme/docker
    source: /mnt/nvme/docker
    type: disk
  disk1:
    path: /mnt/tank/media
    source: /mnt/tank/media
    type: disk
  disk2:
    path: /mnt/tank/photos
    source: /mnt/tank/photos
    type: disk
  eth0:
    nictype: bridged
    parent: br0
    type: nic
  root:
    path: /
    pool: nvme
    type: disk
ephemeral: false
profiles:
- default
stateful: false
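
To catch more detail across the next freeze, I am considering leaving a log follower running in a tmux session. A minimal sketch (the incus monitor flags are the ones I know from the LXD-era client, which I assume Incus kept):

# Follow the unit log with precise timestamps
journalctl -u incus -f -o short-precise

# In a second pane, stream the daemon's log events as they happen
incus monitor --type=logging --pretty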