@unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x8}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0xe800, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0xe8}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc0045878, &(0x7f0000001440)) 00:19:54 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) socket$inet6_udplite(0xa, 0x2, 0x88) r1 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x141001, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040), &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0), &(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x0, 0x4, 0x49}) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) socket$inet6_udplite(0xa, 0x2, 0x88) (async) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x141001, 0x0) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040), &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0), &(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x0, 0x4, 0x49}) (async) [ 1194.352387][T31231] x_tables: duplicate underflow at hook 1 00:19:54 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x5002000000000000) 00:19:54 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0xff00, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 
0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 0: r0 = accept4$phonet_pipe(0xffffffffffffffff, &(0x7f0000000000), &(0x7f0000000040)=0x10, 0x80000) (async, rerun: 64) r1 = socket$phonet_pipe(0x23, 0x5, 0x2) (rerun: 64) getsockopt$PNPIPE_HANDLE(r1, 0x113, 0x3, 0x0, &(0x7f0000000240)) (async) accept4$phonet_pipe(r1, &(0x7f0000000140), &(0x7f0000000180)=0x10, 0x800) (async) getsockopt$PNPIPE_ENCAP(r0, 0x113, 0x1, &(0x7f0000000100), &(0x7f00000000c0)=0x4) (async, rerun: 64) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000080)) (rerun: 64) 00:19:54 executing program 1: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x5c00000000000000) 00:19:54 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc0045878, &(0x7f0000001440)) 00:19:54 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0x2}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0xff03, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x6000000000000000) 00:19:54 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000080)) 00:19:54 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc0189436, &(0x7f0000001440)) 00:19:54 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 
0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0x4}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x1000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000080)) 00:19:54 executing program 1: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x6000000000000000) 00:19:54 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0x8}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x2000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc0189436, &(0x7f0000001440)) 00:19:54 executing program 3: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x6800000000000000) 00:19:54 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0xe8}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000040), 0x400, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000180)={&(0x7f0000000080)=[0x0], &(0x7f00000000c0)=[0x0], 
&(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x1, 0x1, 0x6, 0x6}) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:19:54 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0x1d0}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x3000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0x2b8}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x4000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x68ce0583ffff0000) 00:19:54 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000040), 0x400, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000180)={&(0x7f0000000080)=[0x0], &(0x7f00000000c0)=[0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x1, 0x1, 0x6, 0x6}) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:19:54 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 
0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x5000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc020660b, &(0x7f0000001440)) 00:19:54 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0xb802}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x6000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:54 executing program 2: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x6c00000000000000) 00:19:54 executing program 1: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:19:55 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0xd001}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:55 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x7000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 0: r0 = socket$inet_sctp(0x2, 0x5, 0x84) bind$inet(r0, &(0x7f0000000000)={0x2, 0x0, @remote}, 0x10) bind$inet(r0, 0x0, 
0x0) ioctl$BTRFS_IOC_SET_RECEIVED_SUBVOL(0xffffffffffffffff, 0xc0c89425, &(0x7f00000000c0)={"d9c135c75798cc19df32bd7d7b6319c8", 0x0, 0x0, {0x6}, {0xffffffffffffffc0, 0x1}, 0x9, [0x0, 0x8, 0x8, 0x0, 0x5, 0x4, 0x56b, 0x7, 0x1, 0x908, 0x1ff, 0xffffffffffffffff, 0x8000000000000000, 0x5, 0x6, 0x8]}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f00000001c0)={0x0, ""/256, 0x0, 0x0, 0x0, 0x0, ""/16, ""/16, ""/16, 0x0}) ioctl$BTRFS_IOC_SET_RECEIVED_SUBVOL(r0, 0xc0c89425, &(0x7f00000003c0)={"2fbe1658ed3d2699d4d8bb08a9e496fe", r1, r2, {0x100000000, 0x30}, {0x3, 0x1}, 0x5, [0x0, 0x480000000000, 0x5, 0x5, 0x8, 0x45, 0x1, 0x81, 0x6, 0x6, 0x7de5cf91, 0xffff, 0xffffffffffffff5b, 0x7, 0x0, 0x4]}) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000080)) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000040)) 00:19:57 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0xe800}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc020660b, &(0x7f0000001440)) 00:19:57 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x8000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 1: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x7400000000000000) 00:19:57 executing program 1: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) 00:19:57 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x7a00000000000000) 00:19:57 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x9000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, 
@unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0xff00}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 1: r0 = socket$inet_sctp(0x2, 0x5, 0x84) bind$inet(r0, &(0x7f0000000000)={0x2, 0x0, @remote}, 0x10) bind$inet(r0, 0x0, 0x0) ioctl$BTRFS_IOC_SET_RECEIVED_SUBVOL(0xffffffffffffffff, 0xc0c89425, &(0x7f00000000c0)={"d9c135c75798cc19df32bd7d7b6319c8", 0x0, 0x0, {0x6}, {0xffffffffffffffc0, 0x1}, 0x9, [0x0, 0x8, 0x8, 0x0, 0x5, 0x4, 0x56b, 0x7, 0x1, 0x908, 0x1ff, 0xffffffffffffffff, 0x8000000000000000, 0x5, 0x6, 0x8]}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f00000001c0)={0x0, ""/256, 0x0, 0x0, 0x0, 0x0, ""/16, ""/16, ""/16, 0x0}) ioctl$BTRFS_IOC_SET_RECEIVED_SUBVOL(r0, 0xc0c89425, &(0x7f00000003c0)={"2fbe1658ed3d2699d4d8bb08a9e496fe", r1, r2, {0x100000000, 0x30}, {0x3, 0x1}, 0x5, [0x0, 0x480000000000, 0x5, 0x5, 0x8, 0x45, 0x1, 0x81, 0x6, 0x6, 0x7de5cf91, 0xffff, 0xffffffffffffff5b, 0x7, 0x0, 0x4]}) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000080)) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000040)) socket$inet_sctp(0x2, 0x5, 0x84) (async) bind$inet(r0, &(0x7f0000000000)={0x2, 0x0, @remote}, 0x10) (async) bind$inet(r0, 0x0, 0x0) (async) ioctl$BTRFS_IOC_SET_RECEIVED_SUBVOL(0xffffffffffffffff, 0xc0c89425, &(0x7f00000000c0)={"d9c135c75798cc19df32bd7d7b6319c8", 0x0, 0x0, {0x6}, {0xffffffffffffffc0, 0x1}, 0x9, [0x0, 0x8, 0x8, 0x0, 0x5, 0x4, 0x56b, 0x7, 0x1, 0x908, 0x1ff, 0xffffffffffffffff, 0x8000000000000000, 0x5, 0x6, 0x8]}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f00000001c0)) (async) ioctl$BTRFS_IOC_SET_RECEIVED_SUBVOL(r0, 0xc0c89425, &(0x7f00000003c0)={"2fbe1658ed3d2699d4d8bb08a9e496fe", r1, r2, {0x100000000, 0x30}, {0x3, 0x1}, 0x5, [0x0, 0x480000000000, 0x5, 0x5, 0x8, 0x45, 0x1, 0x81, 0x6, 0x6, 0x7de5cf91, 0xffff, 0xffffffffffffff5b, 0x7, 0x0, 0x4]}) (async) openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000080)) (async) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000040)) (async) 00:19:57 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc9480440, &(0x7f0000001440)) 00:19:57 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:19:57 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0xa000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 
00:19:57 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0xff03}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x7b00000000000000) 00:19:57 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0xb000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:19:57 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0x1000000}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) 00:19:57 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xca540440, &(0x7f0000001440)) 00:19:57 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0xc000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 
0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0x2000000}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async, rerun: 32) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (rerun: 32) 00:19:57 executing program 2: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x7b00000000000000) 00:19:57 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0xd000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x8c00000000000000) 00:19:57 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0x4000000}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xdd480440, &(0x7f0000001440)) 00:19:57 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:19:57 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0xe000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 
0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0x8000000}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x9716000000000000) 00:19:57 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x10000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:19:57 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0xb8020000}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 2: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:19:57 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:19:57 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0xd0010000}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x11000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x97ffffff00000000) 00:19:57 executing program 3: r0 = 
openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:19:57 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0xe8000000}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x12000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:19:57 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0xff000000}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 3: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x9900000000000000) 00:19:57 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:19:57 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x18000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:57 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, 
@unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0xff030000}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfffffdfd, &(0x7f0000001440)) 00:19:58 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x25000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe0}, {0x28}}}}, 0x3f0) 00:19:58 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) 00:19:58 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x48000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:19:58 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, 
{0x28}}}}, 0x3f0) 00:19:58 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xffffff7f, &(0x7f0000001440)) 00:19:58 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x9e00000000000000) 00:19:58 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x4c000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x2}, {0x28}}}}, 0x3f0) 00:19:58 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) getsockopt$PNPIPE_ENCAP(0xffffffffffffffff, 0x113, 0x1, &(0x7f0000000000), &(0x7f0000000080)=0x4) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f00000000c0)) r1 = syz_open_dev$dri(&(0x7f0000000100), 0xff8, 0x4800) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000280)={&(0x7f0000000180)=[0x0], &(0x7f00000001c0)=[0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0], &(0x7f0000000240)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x1, 0x2, 0x3, 0x8}) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000140)) 00:19:58 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x4}, {0x28}}}}, 0x3f0) 00:19:58 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x300, &(0x7f0000001440)) 00:19:58 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x68000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 2: openat$nci(0xffffffffffffff9c, 
&(0x7f0000001400), 0x2, 0xba01000000000000) 00:19:58 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = openat$zero(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) read$usbfs(r1, 0x0, 0xffffffffffffffa7) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f00000001c0)={&(0x7f00000000c0)=[0x0, 0x0], &(0x7f0000000100)=[0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x2, 0x1, 0x4, 0x8}) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) 00:19:58 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0xce16000000000000) 00:19:58 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x6c000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0xe8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) getsockopt$PNPIPE_ENCAP(0xffffffffffffffff, 0x113, 0x1, &(0x7f0000000000), &(0x7f0000000080)=0x4) (async) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f00000000c0)) r1 = syz_open_dev$dri(&(0x7f0000000100), 0xff8, 0x4800) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000280)={&(0x7f0000000180)=[0x0], &(0x7f00000001c0)=[0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0], &(0x7f0000000240)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x1, 0x2, 0x3, 0x8}) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000140)) 00:19:58 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x500, &(0x7f0000001440)) 00:19:58 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 
&(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x6d2a0000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x1d0}, {0x28}}}}, 0x3f0) 00:19:58 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = openat$zero(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) read$usbfs(r1, 0x0, 0xffffffffffffffa7) (async) read$usbfs(r1, 0x0, 0xffffffffffffffa7) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f00000001c0)={&(0x7f00000000c0)=[0x0, 0x0], &(0x7f0000000100)=[0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x2, 0x1, 0x4, 0x8}) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) 00:19:58 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x74000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) [ 1197.525173][T31457] workqueue: Failed to create a rescuer kthread for wq "nfc5_nci_cmd_wq": -EINTR [ 1198.543009][T31670] xt_check_table_hooks: 26 callbacks suppressed [ 1198.543026][T31670] x_tables: duplicate underflow at hook 1 00:19:58 executing program 3: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0xebffffff00000000) 00:19:58 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x2b8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 
0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x7a000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x600, &(0x7f0000001440)) 00:19:58 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) r2 = openat$nci(0xffffffffffffff9c, &(0x7f0000000080), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f00000000c0)) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, &(0x7f0000000b40)={0xc}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(r1, 0x3ba0, &(0x7f0000000b80)={0x48, 0x7, r1, 0x0, 0xf9038c6330f2e5cf, 0x0, 0x1, 0x2c83a5, 0x123400}) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f0000000000), &(0x7f0000000040)=0x4) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) [ 1198.632877][T31681] x_tables: duplicate underflow at hook 1 00:19:58 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0xb802}, {0x28}}}}, 0x3f0) 00:19:58 executing program 4: ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x6, 0x3, 0x2, 0x6}) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000140)) 00:19:58 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) (async) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) r2 = openat$nci(0xffffffffffffff9c, &(0x7f0000000080), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f00000000c0)) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, &(0x7f0000000b40)={0xc}) (async) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, &(0x7f0000000b40)={0xc}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(r1, 0x3ba0, &(0x7f0000000b80)={0x48, 0x7, r1, 0x0, 0xf9038c6330f2e5cf, 0x0, 0x1, 0x2c83a5, 0x123400}) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f0000000000), &(0x7f0000000040)=0x4) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:19:58 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, 
@multicast2, 0x0, 0xb8020000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0xd001}, {0x28}}}}, 0x3f0) 00:19:58 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0xf702000000000000) 00:19:58 executing program 0: ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x6, 0x3, 0x2, 0x6}) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000140)) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x6, 0x3, 0x2, 0x6}) (async) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000140)) (async) [ 1198.766824][T31719] x_tables: duplicate underflow at hook 1 00:19:58 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x700, &(0x7f0000001440)) 00:19:58 executing program 3: r0 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x541b, 0x0) ioctl$MEDIA_IOC_G_TOPOLOGY(r0, 0xc0487c04, &(0x7f0000000800)={0x0, 0x7, 0x0, &(0x7f0000000040)=[{}, {}, {}, {}, {}, {}, {}], 0x8, 0x0, &(0x7f0000000300)=[{}, {}, {}, {}, {}, {}, {}, {}], 0x4, 0x0, &(0x7f0000000680)=[{}, {}, {}, {}], 0x6, 0x0, &(0x7f0000000880)=[{}, {}, {}, {}, {}, {}]}) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) 00:19:58 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0xe800}, {0x28}}}}, 0x3f0) 00:19:58 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0xd0010000, 0x0, 0x0, {}, {@mac=@link_local}, 
0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 1: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0xfdffffff00000000) 00:19:58 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0xff00}, {0x28}}}}, 0x3f0) [ 1198.892419][T31748] x_tables: duplicate underflow at hook 1 00:19:58 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x900, &(0x7f0000001440)) 00:19:58 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)=0x0) r2 = syz_open_dev$dri(&(0x7f0000000040), 0x0, 0x0) r3 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000140), 0x0, 0x0) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000180)) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r3, 0xc04064aa, &(0x7f00000004c0)={0x0, 0x0}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0], 0xa, 0x1, 0x4, 0x1}) r4 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) r5 = syz_genetlink_get_family_id$nl802154(&(0x7f0000000040), 0xffffffffffffffff) sendmsg$NL802154_CMD_SET_MAX_FRAME_RETRIES(r4, &(0x7f0000000140)={0x0, 0x0, &(0x7f0000000100)={&(0x7f0000000180)=ANY=[@ANYBLOB='$\x00\x00\x00', @ANYRES16=r5, @ANYBLOB="010026bd7000fbdbdf250f", @ANYBLOB], 0x24}}, 0x0) r6 = syz_genetlink_get_family_id$nfc(&(0x7f0000000200), 0xffffffffffffffff) sendmsg$NFC_CMD_DEV_UP(r4, &(0x7f00000002c0)={&(0x7f00000001c0)={0x10, 0x0, 0x0, 0x8000}, 0xc, &(0x7f0000000280)={&(0x7f0000000240)={0x24, r6, 0x800, 0x70bd2c, 0x25dfdbfb, {}, [@NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r1}, @NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r1}]}, 0x24}, 0x1, 0x0, 0x0, 0x4000}, 0x20000081) 00:19:58 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0xe8000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:58 executing program 1: r0 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x541b, 0x0) ioctl$MEDIA_IOC_G_TOPOLOGY(r0, 0xc0487c04, &(0x7f0000000800)={0x0, 0x7, 0x0, &(0x7f0000000040)=[{}, {}, {}, {}, {}, {}, {}], 0x8, 0x0, &(0x7f0000000300)=[{}, {}, {}, {}, {}, {}, {}, {}], 0x4, 0x0, &(0x7f0000000680)=[{}, {}, {}, {}], 0x6, 0x0, &(0x7f0000000880)=[{}, {}, {}, {}, {}, {}]}) (async, rerun: 32) r1 = 
openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (rerun: 32) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) [ 1198.970141][T31764] x_tables: duplicate underflow at hook 1 [ 1198.971410][T31757] netlink: 16 bytes leftover after parsing attributes in process `syz-executor.4'. 00:19:58 executing program 2: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0xffff1f0000000000) 00:19:58 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0xff03}, {0x28}}}}, 0x3f0) 00:19:59 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0xff000000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:59 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)=0x0) r2 = syz_open_dev$dri(&(0x7f0000000040), 0x0, 0x0) r3 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000140), 0x0, 0x0) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000180)) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r3, 0xc04064aa, &(0x7f00000004c0)={0x0, 0x0}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0], 0xa, 0x1, 0x4, 0x1}) r4 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) r5 = syz_genetlink_get_family_id$nl802154(&(0x7f0000000040), 0xffffffffffffffff) sendmsg$NL802154_CMD_SET_MAX_FRAME_RETRIES(r4, &(0x7f0000000140)={0x0, 0x0, &(0x7f0000000100)={&(0x7f0000000180)=ANY=[@ANYBLOB='$\x00\x00\x00', @ANYRES16=r5, @ANYBLOB="010026bd7000fbdbdf250f", @ANYBLOB], 0x24}}, 0x0) r6 = syz_genetlink_get_family_id$nfc(&(0x7f0000000200), 0xffffffffffffffff) sendmsg$NFC_CMD_DEV_UP(r4, &(0x7f00000002c0)={&(0x7f00000001c0)={0x10, 0x0, 0x0, 0x8000}, 0xc, &(0x7f0000000280)={&(0x7f0000000240)={0x24, r6, 0x800, 0x70bd2c, 0x25dfdbfb, {}, [@NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r1}, @NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r1}]}, 0x24}, 0x1, 0x0, 0x0, 0x4000}, 0x20000081) 00:19:59 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 
0x1000000}, {0x28}}}}, 0x3f0) [ 1199.048529][T31779] x_tables: duplicate underflow at hook 1 00:19:59 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x900, &(0x7f0000001440)) 00:19:59 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0xff030000, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) [ 1199.120822][T31784] netlink: 16 bytes leftover after parsing attributes in process `syz-executor.3'. 00:19:59 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x2000000}, {0x28}}}}, 0x3f0) [ 1199.134557][T31804] x_tables: duplicate underflow at hook 1 00:19:59 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0xffffff7f00000000) 00:19:59 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:59 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x4000000}, {0x28}}}}, 0x3f0) 00:19:59 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)=0x0) r2 = syz_open_dev$dri(&(0x7f0000000040), 0x0, 0x0) (async) r3 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000140), 0x0, 0x0) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000180)) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r3, 0xc04064aa, &(0x7f00000004c0)={0x0, 0x0}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 
&(0x7f0000000040)=[0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0], 0xa, 0x1, 0x4, 0x1}) (async) r4 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) (async) r5 = syz_genetlink_get_family_id$nl802154(&(0x7f0000000040), 0xffffffffffffffff) sendmsg$NL802154_CMD_SET_MAX_FRAME_RETRIES(r4, &(0x7f0000000140)={0x0, 0x0, &(0x7f0000000100)={&(0x7f0000000180)=ANY=[@ANYBLOB='$\x00\x00\x00', @ANYRES16=r5, @ANYBLOB="010026bd7000fbdbdf250f", @ANYBLOB], 0x24}}, 0x0) (async) r6 = syz_genetlink_get_family_id$nfc(&(0x7f0000000200), 0xffffffffffffffff) sendmsg$NFC_CMD_DEV_UP(r4, &(0x7f00000002c0)={&(0x7f00000001c0)={0x10, 0x0, 0x0, 0x8000}, 0xc, &(0x7f0000000280)={&(0x7f0000000240)={0x24, r6, 0x800, 0x70bd2c, 0x25dfdbfb, {}, [@NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r1}, @NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r1}]}, 0x24}, 0x1, 0x0, 0x0, 0x4000}, 0x20000081) [ 1199.207779][T31819] x_tables: duplicate underflow at hook 1 00:19:59 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xa00, &(0x7f0000001440)) 00:19:59 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x2, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:59 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x8000000}, {0x28}}}}, 0x3f0) [ 1199.268913][T31837] x_tables: duplicate underflow at hook 1 00:19:59 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0xffffffff00000000) [ 1199.291234][T31843] netlink: 16 bytes leftover after parsing attributes in process `syz-executor.3'. 
00:19:59 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x3, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) [ 1199.326431][T31854] x_tables: duplicate underflow at hook 1 00:19:59 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0xb8020000}, {0x28}}}}, 0x3f0) 00:19:59 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x4, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:59 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:19:59 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xb00, &(0x7f0000001440)) 00:19:59 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x5, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:59 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:19:59 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, 
{}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0xd0010000}, {0x28}}}}, 0x3f0) 00:19:59 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x6, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:59 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc00, &(0x7f0000001440)) 00:19:59 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:19:59 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x7, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:59 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0xe8000000}, {0x28}}}}, 0x3f0) 00:19:59 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:19:59 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x8, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:59 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, 
&(0x7f0000001440)) 00:19:59 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xd00, &(0x7f0000001440)) 00:19:59 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0xff000000}, {0x28}}}}, 0x3f0) 00:19:59 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x9, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:59 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000100), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) r1 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r1, 0xc05064a7, &(0x7f00000003c0)={0x0, 0x0, 0x0, 0x0, 0x0, 0xfffffffffffffe4c, 0x0, 0x0, r2}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0], &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000001c0)=[0x0], &(0x7f0000000200)=[0x0, 0x0], 0x1, 0x4, 0x1, 0x2}) r3 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r3, 0x401c5820, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000040)) getsockopt$inet_opts(r3, 0x0, 0x9, &(0x7f0000000000)=""/20, &(0x7f0000000080)=0x14) 00:19:59 executing program 3: r0 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) r1 = syz_genetlink_get_family_id$netlbl_mgmt(&(0x7f0000000040), 0xffffffffffffffff) sendmsg$NLBL_MGMT_C_REMOVE(r0, &(0x7f0000000100)={&(0x7f0000000000)={0x10, 0x0, 0x0, 0x40000}, 0xc, &(0x7f00000000c0)={&(0x7f0000000080)={0x38, r1, 0x2, 0x70bd29, 0x25dfdbff, {}, [@NLBL_MGMT_A_IPV6ADDR={0x14, 0x5, @empty}, @NLBL_MGMT_A_IPV4ADDR={0x8, 0x7, @broadcast}, @NLBL_MGMT_A_IPV4ADDR={0x8, 0x7, @initdev={0xac, 0x1e, 0x1, 0x0}}]}, 0x38}, 0x1, 0x0, 0x0, 0x20000800}, 0x24040001) r2 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f0000001440)) 00:19:59 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0xa, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 
'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:19:59 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0xff030000}, {0x28}}}}, 0x3f0) 00:19:59 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000000)) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:00 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xe00, &(0x7f0000001440)) 00:20:00 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0xb, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:00 executing program 1: r0 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) r1 = syz_genetlink_get_family_id$netlbl_mgmt(&(0x7f0000000040), 0xffffffffffffffff) sendmsg$NLBL_MGMT_C_REMOVE(r0, &(0x7f0000000100)={&(0x7f0000000000)={0x10, 0x0, 0x0, 0x40000}, 0xc, &(0x7f00000000c0)={&(0x7f0000000080)={0x38, r1, 0x2, 0x70bd29, 0x25dfdbff, {}, [@NLBL_MGMT_A_IPV6ADDR={0x14, 0x5, @empty}, @NLBL_MGMT_A_IPV4ADDR={0x8, 0x7, @broadcast}, @NLBL_MGMT_A_IPV4ADDR={0x8, 0x7, @initdev={0xac, 0x1e, 0x1, 0x0}}]}, 0x38}, 0x1, 0x0, 0x0, 0x20000800}, 0x24040001) r2 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f0000001440)) 00:20:00 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x2}}, {0x28}}}}, 0x3f0) 00:20:00 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000100), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) (async) r1 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r1, 0xc05064a7, &(0x7f00000003c0)={0x0, 0x0, 0x0, 0x0, 0x0, 0xfffffffffffffe4c, 0x0, 0x0, r2}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, 
&(0x7f0000000240)={&(0x7f0000000140)=[0x0], &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000001c0)=[0x0], &(0x7f0000000200)=[0x0, 0x0], 0x1, 0x4, 0x1, 0x2}) (async) r3 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r3, 0x401c5820, 0x0) (async, rerun: 64) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000040)) (async, rerun: 64) getsockopt$inet_opts(r3, 0x0, 0x9, &(0x7f0000000000)=""/20, &(0x7f0000000080)=0x14) 00:20:00 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0xc, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:00 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x4}}, {0x28}}}}, 0x3f0) 00:20:00 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1100, &(0x7f0000001440)) 00:20:00 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000080)) 00:20:00 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0xd, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:00 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:00 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x8}}, {0x28}}}}, 0x3f0) 
00:20:00 executing program 1: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000000), 0x600, 0x0) r1 = syz_open_dev$dri(&(0x7f0000000040), 0x9, 0x428503) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(r0, 0xc00864bf, &(0x7f0000000080)={0x0, 0x1}) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r1, 0xc01064c1, &(0x7f00000000c0)={r2}) ioctl$DRM_IOCTL_MODE_CURSOR2(r0, 0xc02464bb, 0x0) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f0000000140)={&(0x7f0000000100)=[0x0, 0x0], 0x2}) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000001440)) 00:20:00 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0xe, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:00 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1200, &(0x7f0000001440)) 00:20:00 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r1 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000080)) 00:20:00 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:00 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xe8}}, {0x28}}}}, 0x3f0) 00:20:00 executing program 2: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000000), 0x600, 0x0) r1 = syz_open_dev$dri(&(0x7f0000000040), 0x9, 0x428503) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(r0, 0xc00864bf, &(0x7f0000000080)={0x0, 0x1}) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r1, 0xc01064c1, &(0x7f00000000c0)={r2}) ioctl$DRM_IOCTL_MODE_CURSOR2(r0, 0xc02464bb, 0x0) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f0000000140)={&(0x7f0000000100)=[0x0, 0x0], 0x2}) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000001440)) openat$drirender128(0xffffffffffffff9c, &(0x7f0000000000), 0x600, 0x0) (async) syz_open_dev$dri(&(0x7f0000000040), 0x9, 0x428503) (async) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(r0, 0xc00864bf, &(0x7f0000000080)={0x0, 0x1}) (async) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r1, 0xc01064c1, &(0x7f00000000c0)={r2}) (async) ioctl$DRM_IOCTL_MODE_CURSOR2(r0, 0xc02464bb, 0x0) (async) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) 
ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f0000000140)={&(0x7f0000000100)=[0x0, 0x0], 0x2}) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000001440)) (async) 00:20:00 executing program 0: r0 = openat$zero(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) sendmsg$qrtr(r0, &(0x7f00000007c0)={&(0x7f0000000040)={0x2a, 0xffffffffffffffff, 0x2}, 0xc, &(0x7f00000005c0)=[{&(0x7f0000000080)="d65ef4303906240b73ecf776074acbf71e31e18ef65abdccdc8480be5a62e467d15455da295af05dbe3935c26e47fe54a657", 0x32}, {&(0x7f00000000c0)="6d25f34624f849111d4289b3882b201ddef5c5b0b4ddcebfbf23c122e7fce8d2ab95ca1badddb5a2d071eeb46b432bfef52255afc7cb5e98483fa429b250fb443eaeb6aa12f46cc8f105165aa13efc2a3722fa6514335491ed3a23735a0024cb2f13f20dd628c3c5aeb79d5c2d46", 0x6e}, {&(0x7f0000000140)="07c2c4f1071416205517a59879f23a81e89a268b9ce27aaa7d92b6382f3f41257879daed9cd7b96815af89a6825ed7ffcc0edc8dfc7ff10d6ed92981bd2023d10c6a90bff513de21bf34160aeb989b900caefab05e98b360ea8360f76b1d7b0102263d913f58778f04", 0x69}, {&(0x7f00000001c0)="d64ecf786c61fb381edd146e56c8b65ec593c7f4c0db958b2a53671acd12de0701d17cdb94bcdff37b6a85a9435645ad5abab7342447d338b9a581be3329d6e738ca13efe66378d5539878747ce1e2914e5a4764b0f64240913148f48f0365a80fc73d1b818e9ff90ff5c8a42ee79a895a18c68fa91ad058f138069dbd5e671f8c1b4022b1dcbc2a814132cd6109804f81b80e520a520ab86c0280a9a6c1322800e62e60935cd95e6ae2077c20c7edb2bf478f13ba66373a3c4fc17098ec722fce2ab92b155c32df93f0355071dc485ef0edd228e5c9495a73ad5df12bcb575cffeb2db63a706b56dfcc6946bc00cc5847f8ba4e986a6187ecaa33c2a708", 0xfe}, {&(0x7f00000002c0)="7974f090e952568f4c59007db524f5ef24eb32e34626241bea4ccbb802d58afa35edbdb28af1002b5adf853bd3c18a4e3d75abd18dbe1be3dc10f347a6f61fd5d34c24601d434916", 0x48}, {&(0x7f0000000340)="f00a88ca1871d137fb5a4f6f4c5ae3436b9b13c32044b405308c7bc3ffeb942d5c6058bb8338212d7840c8d5bc7d3cc7f9d43f18eec886ec83f03fc8a10356482770e7933fa9f935e761", 0x4a}, {&(0x7f00000003c0)="4001558e83f07d375464a31d25ffe86d0fd2f2975b2c7553fedc51b54324a4bd022b47204a793dad03863dc964792ba70441c687f38eb02cf961db257a71663325dd6f74e5364982bc1de51bf3a0711d385b148e87f99d87cb553579fe61726af5ecbe5e6199464a8279a2e8bef5fd6a6754b1555e400dd6899eec6a6bd7ee2d48b56eb0075ca654c624b740bfd423a30df8bba5e28392aa1d07f927424ae714097c9eb952dead22e9cd8159cb3d1db4973dfa77a79d623c50d44f4a9dae83a28db6673f6b085f18e8e93d98f6926e4c61c83a0c21f9d956e73874e0c517a6166606623f3a675a8dff14b6ea5833443f4127801d3f8299d3a5", 0xf9}, {&(0x7f00000004c0)="5729f86e22c18972fd581f5e5299df985f37e43371b47d3770153ed77fb8149a5f66911ab60462f6fd043c3d911a5ddf553c33", 0x33}, {&(0x7f0000000500)="592ebad3d45674932f5899bd6a63ada1fe25ee131acb6d1b12b7e1a5f2a765f33f78f435b701900e273825ac0d739ee202d5c0b0f984b62be891525dcc5572710d864b14588ed99534091ff9324d2bd7ed9dc78e0f3890b4b4499c4a6bdf6399274af9654c9426ef0a81891947d100fe2387fc3f837ed8d4416a939929b37d50389f86aa25a5dc3765ebba532ac7e85ae403553ee5dbb14164399cf33ed0fb8e331c6460f88304", 0xa7}], 0x9, &(0x7f0000000680)=[{0x110, 0x110, 0x176, "d2c64300f5eb423501c5b59702abe544a97417204f346e26145e576d1fca5dc876a13f1fd26db294fac771912f8a943ae8ce843e16b420194a3435838b84e8a11622657daee360c24eb887745ebf939da72af8b1d64da02e6dd3c29ddc60172097625e6db9f6c96f0d8252c3c4a30374487c9e33a8bb81c269726c0c65342b4c370d1a1c4690795d023604f86008e12bb6f71acf80b991570089c25c14165e99824ba0368ff25ee12b78ba08996bda2eba027d91bdd20d0d8042e808b20c0afe22fb37802d158dab809ac74ed30d4d3fcc06031e9f3cae9a38cdf5a81f7f2024e1b8bd921c99af434463156f7a9dabd0108b218686693e3b780bb5bfa8bb5b"}], 0x110, 0x40804}, 0x38) r1 = openat$nci(0xffffffffffffff9c, 
&(0x7f0000001400), 0x2, 0x0) r2 = openat$zero(0xffffffffffffff9c, &(0x7f0000000800), 0x88000, 0x0) sendmsg$qrtr(r2, &(0x7f0000000cc0)={0x0, 0x0, &(0x7f0000000b40)=[{&(0x7f0000000840)="fa43cdd639ad1d9c59e05a7b6d238bcd561be50751c04212238b1dbb0065aeb8bc39cc8311239a2aa946878815aaecd1eb6939d9576be0b107a6689a87936e4990d1812e4368c6c82de0a55ca43c63ac2caf0fe726f2f2d8a849ebc47ace8bb3c927d020a88677623e739612bee73668e990a176dae7", 0x76}, {&(0x7f00000008c0)="df37f0446ff856ecaf4ed88e184df446442824ac23bcd4ada9e384fc4b024238437816d9442faa2f6fd5622fea4e5b791e7d7abc9f9f9c995c758fadc3faae915cce3d5388401d1406e8690a3777d1549ba5242ee7481a7c6a841fadb437a0c5a0dbaeb33de4a7a0b5bfba74dbcfa344f22fe557a860c7726d589db5332c3f3954c4d6ca70bf04ad8f1008424e0254aa524ceaa6a8b1eeb4ca3f5b0956117e95dde47448f1e2a67bafb412898ea2ee0632cfc3a7003874fbbd2fb7c5462460e04148326e5a5460e80cb96e477e7f3e0974c632f7", 0xd4}, {&(0x7f00000009c0)="89b0a3eee0d7628d3abe0f2ff1de24", 0xf}, {&(0x7f0000000a00)="c6c4083f3d270e2bd570ad1be5ad1846d6b3816bd530e9b3d255a65b8f61609b67ef1609551fee71bc970765065bb5596845a1fe25d0470909cb27216b3872e0529e06bfb574af44a9ad42267c3dc44cb566dc8ded8460717f855be85596b768dd2f2004c48470ed70e59c", 0x6b}, {&(0x7f0000000a80)="be4b96dbc8ec63f08b5c3510fe5ca24a980e27572dbb347988cd52e7b9b160e0de20e2d4471f5b696a93c63c32977567cb900d944b4f6cfa34e17344f6ef5278c13fabaab8859366691accea9677b83d0f3b50a99945cf1d335485a0d0d54ac664370008e4ed4f164517480d90d549f24f4939c35d5a5bd3487fa6cc3620dc1b88966a031a5d42f6dab1b5", 0x8b}], 0x5, &(0x7f0000000bc0)=[{0x50, 0xff, 0x4d8, "7c527cab7a4d167317854b5261b22ce7d6d65cd3a1e7e4cae7d61c967e76703ce800922c7bca9a74be5af6bb5cc6560c432b75ae962c3787ac"}, {0xa0, 0x29, 0x2, "ce98e782c87891f9975dbe9b09237ccdd94565956d5eed53ef2cf8f6843d6d92503f2758deb8166d309863a7ddb5549443f73a0d2324f316e0a934b380e234bb314ff47b9323a32ffd6dbc01e42c2ce95b8f4fb48212bf957bd488e80d82dcd2330ec34120ba5846fdfd56031fc7cd360c667faaa9b6e13057e7020e3efad2de9135a7945bfe5dd0e694bd"}], 0xf0, 0x20018010}, 0x38) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) 00:20:00 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x10, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:00 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x1d0}}, {0x28}}}}, 0x3f0) 00:20:00 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:00 executing program 1: r0 = openat$nci(0xffffffffffffff9c, 
&(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1800, &(0x7f0000001440)) 00:20:00 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x11, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:00 executing program 2: r0 = openat$zero(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) sendmsg$qrtr(r0, &(0x7f00000007c0)={&(0x7f0000000040)={0x2a, 0xffffffffffffffff, 0x2}, 0xc, &(0x7f00000005c0)=[{&(0x7f0000000080)="d65ef4303906240b73ecf776074acbf71e31e18ef65abdccdc8480be5a62e467d15455da295af05dbe3935c26e47fe54a657", 0x32}, {&(0x7f00000000c0)="6d25f34624f849111d4289b3882b201ddef5c5b0b4ddcebfbf23c122e7fce8d2ab95ca1badddb5a2d071eeb46b432bfef52255afc7cb5e98483fa429b250fb443eaeb6aa12f46cc8f105165aa13efc2a3722fa6514335491ed3a23735a0024cb2f13f20dd628c3c5aeb79d5c2d46", 0x6e}, {&(0x7f0000000140)="07c2c4f1071416205517a59879f23a81e89a268b9ce27aaa7d92b6382f3f41257879daed9cd7b96815af89a6825ed7ffcc0edc8dfc7ff10d6ed92981bd2023d10c6a90bff513de21bf34160aeb989b900caefab05e98b360ea8360f76b1d7b0102263d913f58778f04", 0x69}, {&(0x7f00000001c0)="d64ecf786c61fb381edd146e56c8b65ec593c7f4c0db958b2a53671acd12de0701d17cdb94bcdff37b6a85a9435645ad5abab7342447d338b9a581be3329d6e738ca13efe66378d5539878747ce1e2914e5a4764b0f64240913148f48f0365a80fc73d1b818e9ff90ff5c8a42ee79a895a18c68fa91ad058f138069dbd5e671f8c1b4022b1dcbc2a814132cd6109804f81b80e520a520ab86c0280a9a6c1322800e62e60935cd95e6ae2077c20c7edb2bf478f13ba66373a3c4fc17098ec722fce2ab92b155c32df93f0355071dc485ef0edd228e5c9495a73ad5df12bcb575cffeb2db63a706b56dfcc6946bc00cc5847f8ba4e986a6187ecaa33c2a708", 0xfe}, {&(0x7f00000002c0)="7974f090e952568f4c59007db524f5ef24eb32e34626241bea4ccbb802d58afa35edbdb28af1002b5adf853bd3c18a4e3d75abd18dbe1be3dc10f347a6f61fd5d34c24601d434916", 0x48}, {&(0x7f0000000340)="f00a88ca1871d137fb5a4f6f4c5ae3436b9b13c32044b405308c7bc3ffeb942d5c6058bb8338212d7840c8d5bc7d3cc7f9d43f18eec886ec83f03fc8a10356482770e7933fa9f935e761", 0x4a}, {&(0x7f00000003c0)="4001558e83f07d375464a31d25ffe86d0fd2f2975b2c7553fedc51b54324a4bd022b47204a793dad03863dc964792ba70441c687f38eb02cf961db257a71663325dd6f74e5364982bc1de51bf3a0711d385b148e87f99d87cb553579fe61726af5ecbe5e6199464a8279a2e8bef5fd6a6754b1555e400dd6899eec6a6bd7ee2d48b56eb0075ca654c624b740bfd423a30df8bba5e28392aa1d07f927424ae714097c9eb952dead22e9cd8159cb3d1db4973dfa77a79d623c50d44f4a9dae83a28db6673f6b085f18e8e93d98f6926e4c61c83a0c21f9d956e73874e0c517a6166606623f3a675a8dff14b6ea5833443f4127801d3f8299d3a5", 0xf9}, {&(0x7f00000004c0)="5729f86e22c18972fd581f5e5299df985f37e43371b47d3770153ed77fb8149a5f66911ab60462f6fd043c3d911a5ddf553c33", 0x33}, {&(0x7f0000000500)="592ebad3d45674932f5899bd6a63ada1fe25ee131acb6d1b12b7e1a5f2a765f33f78f435b701900e273825ac0d739ee202d5c0b0f984b62be891525dcc5572710d864b14588ed99534091ff9324d2bd7ed9dc78e0f3890b4b4499c4a6bdf6399274af9654c9426ef0a81891947d100fe2387fc3f837ed8d4416a939929b37d50389f86aa25a5dc3765ebba532ac7e85ae403553ee5dbb14164399cf33ed0fb8e331c6460f88304", 0xa7}], 0x9, &(0x7f0000000680)=[{0x110, 0x110, 0x176, 
"d2c64300f5eb423501c5b59702abe544a97417204f346e26145e576d1fca5dc876a13f1fd26db294fac771912f8a943ae8ce843e16b420194a3435838b84e8a11622657daee360c24eb887745ebf939da72af8b1d64da02e6dd3c29ddc60172097625e6db9f6c96f0d8252c3c4a30374487c9e33a8bb81c269726c0c65342b4c370d1a1c4690795d023604f86008e12bb6f71acf80b991570089c25c14165e99824ba0368ff25ee12b78ba08996bda2eba027d91bdd20d0d8042e808b20c0afe22fb37802d158dab809ac74ed30d4d3fcc06031e9f3cae9a38cdf5a81f7f2024e1b8bd921c99af434463156f7a9dabd0108b218686693e3b780bb5bfa8bb5b"}], 0x110, 0x40804}, 0x38) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r2 = openat$zero(0xffffffffffffff9c, &(0x7f0000000800), 0x88000, 0x0) sendmsg$qrtr(r2, &(0x7f0000000cc0)={0x0, 0x0, &(0x7f0000000b40)=[{&(0x7f0000000840)="fa43cdd639ad1d9c59e05a7b6d238bcd561be50751c04212238b1dbb0065aeb8bc39cc8311239a2aa946878815aaecd1eb6939d9576be0b107a6689a87936e4990d1812e4368c6c82de0a55ca43c63ac2caf0fe726f2f2d8a849ebc47ace8bb3c927d020a88677623e739612bee73668e990a176dae7", 0x76}, {&(0x7f00000008c0)="df37f0446ff856ecaf4ed88e184df446442824ac23bcd4ada9e384fc4b024238437816d9442faa2f6fd5622fea4e5b791e7d7abc9f9f9c995c758fadc3faae915cce3d5388401d1406e8690a3777d1549ba5242ee7481a7c6a841fadb437a0c5a0dbaeb33de4a7a0b5bfba74dbcfa344f22fe557a860c7726d589db5332c3f3954c4d6ca70bf04ad8f1008424e0254aa524ceaa6a8b1eeb4ca3f5b0956117e95dde47448f1e2a67bafb412898ea2ee0632cfc3a7003874fbbd2fb7c5462460e04148326e5a5460e80cb96e477e7f3e0974c632f7", 0xd4}, {&(0x7f00000009c0)="89b0a3eee0d7628d3abe0f2ff1de24", 0xf}, {&(0x7f0000000a00)="c6c4083f3d270e2bd570ad1be5ad1846d6b3816bd530e9b3d255a65b8f61609b67ef1609551fee71bc970765065bb5596845a1fe25d0470909cb27216b3872e0529e06bfb574af44a9ad42267c3dc44cb566dc8ded8460717f855be85596b768dd2f2004c48470ed70e59c", 0x6b}, {&(0x7f0000000a80)="be4b96dbc8ec63f08b5c3510fe5ca24a980e27572dbb347988cd52e7b9b160e0de20e2d4471f5b696a93c63c32977567cb900d944b4f6cfa34e17344f6ef5278c13fabaab8859366691accea9677b83d0f3b50a99945cf1d335485a0d0d54ac664370008e4ed4f164517480d90d549f24f4939c35d5a5bd3487fa6cc3620dc1b88966a031a5d42f6dab1b5", 0x8b}], 0x5, &(0x7f0000000bc0)=[{0x50, 0xff, 0x4d8, "7c527cab7a4d167317854b5261b22ce7d6d65cd3a1e7e4cae7d61c967e76703ce800922c7bca9a74be5af6bb5cc6560c432b75ae962c3787ac"}, {0xa0, 0x29, 0x2, "ce98e782c87891f9975dbe9b09237ccdd94565956d5eed53ef2cf8f6843d6d92503f2758deb8166d309863a7ddb5549443f73a0d2324f316e0a934b380e234bb314ff47b9323a32ffd6dbc01e42c2ce95b8f4fb48212bf957bd488e80d82dcd2330ec34120ba5846fdfd56031fc7cd360c667faaa9b6e13057e7020e3efad2de9135a7945bfe5dd0e694bd"}], 0xf0, 0x20018010}, 0x38) (async) sendmsg$qrtr(r2, &(0x7f0000000cc0)={0x0, 0x0, &(0x7f0000000b40)=[{&(0x7f0000000840)="fa43cdd639ad1d9c59e05a7b6d238bcd561be50751c04212238b1dbb0065aeb8bc39cc8311239a2aa946878815aaecd1eb6939d9576be0b107a6689a87936e4990d1812e4368c6c82de0a55ca43c63ac2caf0fe726f2f2d8a849ebc47ace8bb3c927d020a88677623e739612bee73668e990a176dae7", 0x76}, {&(0x7f00000008c0)="df37f0446ff856ecaf4ed88e184df446442824ac23bcd4ada9e384fc4b024238437816d9442faa2f6fd5622fea4e5b791e7d7abc9f9f9c995c758fadc3faae915cce3d5388401d1406e8690a3777d1549ba5242ee7481a7c6a841fadb437a0c5a0dbaeb33de4a7a0b5bfba74dbcfa344f22fe557a860c7726d589db5332c3f3954c4d6ca70bf04ad8f1008424e0254aa524ceaa6a8b1eeb4ca3f5b0956117e95dde47448f1e2a67bafb412898ea2ee0632cfc3a7003874fbbd2fb7c5462460e04148326e5a5460e80cb96e477e7f3e0974c632f7", 0xd4}, {&(0x7f00000009c0)="89b0a3eee0d7628d3abe0f2ff1de24", 0xf}, 
{&(0x7f0000000a00)="c6c4083f3d270e2bd570ad1be5ad1846d6b3816bd530e9b3d255a65b8f61609b67ef1609551fee71bc970765065bb5596845a1fe25d0470909cb27216b3872e0529e06bfb574af44a9ad42267c3dc44cb566dc8ded8460717f855be85596b768dd2f2004c48470ed70e59c", 0x6b}, {&(0x7f0000000a80)="be4b96dbc8ec63f08b5c3510fe5ca24a980e27572dbb347988cd52e7b9b160e0de20e2d4471f5b696a93c63c32977567cb900d944b4f6cfa34e17344f6ef5278c13fabaab8859366691accea9677b83d0f3b50a99945cf1d335485a0d0d54ac664370008e4ed4f164517480d90d549f24f4939c35d5a5bd3487fa6cc3620dc1b88966a031a5d42f6dab1b5", 0x8b}], 0x5, &(0x7f0000000bc0)=[{0x50, 0xff, 0x4d8, "7c527cab7a4d167317854b5261b22ce7d6d65cd3a1e7e4cae7d61c967e76703ce800922c7bca9a74be5af6bb5cc6560c432b75ae962c3787ac"}, {0xa0, 0x29, 0x2, "ce98e782c87891f9975dbe9b09237ccdd94565956d5eed53ef2cf8f6843d6d92503f2758deb8166d309863a7ddb5549443f73a0d2324f316e0a934b380e234bb314ff47b9323a32ffd6dbc01e42c2ce95b8f4fb48212bf957bd488e80d82dcd2330ec34120ba5846fdfd56031fc7cd360c667faaa9b6e13057e7020e3efad2de9135a7945bfe5dd0e694bd"}], 0xf0, 0x20018010}, 0x38) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) 00:20:00 executing program 3: getsockopt$ARPT_SO_GET_ENTRIES(0xffffffffffffffff, 0x0, 0x61, &(0x7f0000000040)={'filter\x00', 0x79, "df151bc3317fbbcc7c23778574b3b71f9b1bd0907f24f8f6105918d8006f0f49acb982f9edf2d9ca2180d57a1445b1b3f1247ec992105da288fb5e2432c8874f4ffd7912b301ebb740aa83afe4ec59adde2e46a9a92624ea6c88021dfdaf29d3974b0672f9ff20e9d25085d4f1727136ce815d0b19231a9c2d"}, &(0x7f0000000100)=0x9d) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:20:00 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x2b8}}, {0x28}}}}, 0x3f0) 00:20:00 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:00 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x12, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:00 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:00 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 
0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xb802}}, {0x28}}}}, 0x3f0) 00:20:00 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1b54, &(0x7f0000001440)) 00:20:00 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x18, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:00 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:00 executing program 0: getsockopt$ARPT_SO_GET_ENTRIES(0xffffffffffffffff, 0x0, 0x61, &(0x7f0000000040)={'filter\x00', 0x79, "df151bc3317fbbcc7c23778574b3b71f9b1bd0907f24f8f6105918d8006f0f49acb982f9edf2d9ca2180d57a1445b1b3f1247ec992105da288fb5e2432c8874f4ffd7912b301ebb740aa83afe4ec59adde2e46a9a92624ea6c88021dfdaf29d3974b0672f9ff20e9d25085d4f1727136ce815d0b19231a9c2d"}, &(0x7f0000000100)=0x9d) (async) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async, rerun: 32) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) (rerun: 32) 00:20:00 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xd001}}, {0x28}}}}, 0x3f0) 00:20:00 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x25, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:00 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x2154, &(0x7f0000001440)) 00:20:00 executing program 2: r0 = socket$bt_rfcomm(0x1f, 0x3, 0x3) bind$bt_rfcomm(r0, &(0x7f0000000080)={0x1f, @none, 0xa1}, 0xa) r1 = syz_open_dev$usbfs(&(0x7f0000000000), 0x7, 0x101e80) ioctl$USBDEVFS_RESETEP(r1, 0x80045503, 
&(0x7f0000000040)={0x1}) r2 = socket$bt_rfcomm(0x1f, 0x1, 0x3) getsockopt$bt_rfcomm_RFCOMM_LM(r2, 0x12, 0x3, &(0x7f00000000c0), &(0x7f0000000100)=0x4) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000001440)) 00:20:00 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:00 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x48, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:00 executing program 3: r0 = syz_open_dev$usbfs(&(0x7f0000000000), 0x9a76edf, 0x88000) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000001440)) 00:20:00 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xe800}}, {0x28}}}}, 0x3f0) 00:20:00 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x2154, &(0x7f0000001440)) 00:20:00 executing program 0: r0 = syz_open_dev$usbfs(&(0x7f0000000000), 0x9a76edf, 0x88000) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000001440)) 00:20:00 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x4c, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:00 executing program 4: r0 = socket$bt_rfcomm(0x1f, 0x3, 0x3) bind$bt_rfcomm(r0, &(0x7f0000000080)={0x1f, @none, 0xa1}, 0xa) r1 = syz_open_dev$usbfs(&(0x7f0000000000), 0x7, 0x101e80) ioctl$USBDEVFS_RESETEP(r1, 0x80045503, &(0x7f0000000040)={0x1}) (async) r2 = socket$bt_rfcomm(0x1f, 0x1, 0x3) getsockopt$bt_rfcomm_RFCOMM_LM(r2, 0x12, 0x3, &(0x7f00000000c0), &(0x7f0000000100)=0x4) (async, rerun: 64) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (rerun: 64) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000001440)) 00:20:00 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 
0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xff00}}, {0x28}}}}, 0x3f0) 00:20:00 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x2500, &(0x7f0000001440)) 00:20:00 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = accept4$phonet_pipe(0xffffffffffffffff, 0x0, &(0x7f0000000000), 0x800) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f0000000040), &(0x7f0000000080)=0x4) 00:20:00 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x68, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:01 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xff03}}, {0x28}}}}, 0x3f0) 00:20:01 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:01 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x6c, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:01 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x1000000}}, {0x28}}}}, 0x3f0) 00:20:01 executing 
program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x3389, &(0x7f0000001440)) 00:20:01 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = accept4$phonet_pipe(0xffffffffffffffff, 0x0, &(0x7f0000000000), 0x800) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f0000000040), &(0x7f0000000080)=0x4) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) accept4$phonet_pipe(0xffffffffffffffff, 0x0, &(0x7f0000000000), 0x800) (async) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f0000000040), &(0x7f0000000080)=0x4) (async) 00:20:01 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x74, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:01 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x2000000}}, {0x28}}}}, 0x3f0) 00:20:01 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:01 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x7a, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:01 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x4000000}}, {0x28}}}}, 0x3f0) 00:20:01 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5054, 
&(0x7f0000001440)) 00:20:01 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0xe8, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:01 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:20:01 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x8000000}}, {0x28}}}}, 0x3f0) 00:20:01 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) socket$inet6_udplite(0xa, 0x2, 0x88) socket$inet6_udplite(0xa, 0x2, 0x88) 00:20:01 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5154, &(0x7f0000001440)) 00:20:01 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:01 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xb8020000}}, {0x28}}}}, 0x3f0) 00:20:01 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) (async) 00:20:01 executing program 0: r0 = openat$nci(0xffffffffffffff9c, 
&(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) socket$inet6_udplite(0xa, 0x2, 0x88) (async) socket$inet6_udplite(0xa, 0x2, 0x88) 00:20:01 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x2, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:01 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xd0010000}}, {0x28}}}}, 0x3f0) 00:20:01 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x3, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) [ 1201.270460][T32256] workqueue: Failed to create a rescuer kthread for wq "nfc13_nci_cmd_wq": -EINTR 00:20:01 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5254, &(0x7f0000001440)) 00:20:01 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xe8000000}}, {0x28}}}}, 0x3f0) [ 1201.534531][T32335] workqueue: Failed to create a rescuer kthread for wq "nfc21_nci_tx_wq": -EINTR 00:20:01 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000200)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000240)) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000000)) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000002c0)) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x401c5820, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 
0xc04064a0, &(0x7f0000000380)={&(0x7f0000000280)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000002c0), &(0x7f0000000300)=[0x0, 0x0, 0x0], &(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x26, 0x7}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r1, 0xc02064b9, &(0x7f0000000440)={&(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0, 0x0], 0x5, r2, 0xc0c0c0c0}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f00000004c0)={&(0x7f0000000480)=[r3, r4], 0x2, 0x80000}) 00:20:02 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) 00:20:02 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x4, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:02 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000200)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000240)) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000000)) (async, rerun: 32) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) (async, rerun: 32) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000002c0)) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x401c5820, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000380)={&(0x7f0000000280)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000002c0), &(0x7f0000000300)=[0x0, 0x0, 0x0], &(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x26, 0x7}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r1, 0xc02064b9, &(0x7f0000000440)={&(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0, 0x0], 0x5, r2, 0xc0c0c0c0}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f00000004c0)={&(0x7f0000000480)=[r3, r4], 0x2, 0x80000}) 00:20:02 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xff000000}}, {0x28}}}}, 0x3f0) 00:20:02 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x541b, &(0x7f0000001440)) 00:20:02 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) socket$inet6_udplite(0xa, 0x2, 0x88) 00:20:02 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 
0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x5, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:02 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xff030000}}, {0x28}}}}, 0x3f0) 00:20:02 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x6, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:02 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5421, &(0x7f0000001440)) 00:20:02 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x7, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:02 executing program 0: r0 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) syz_genetlink_get_family_id$nl802154(&(0x7f0000000040), r0) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r2 = socket$inet(0x2, 0x3, 0x9) ioctl$BTRFS_IOC_START_SYNC(r2, 0x89e0, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) 00:20:02 executing program 2: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) 00:20:02 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, 
@unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x100000000000000}}, {0x28}}}}, 0x3f0) 00:20:02 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) socket$inet6_udplite(0xa, 0x2, 0x88) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) socket$inet6_udplite(0xa, 0x2, 0x88) (async) 00:20:02 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x8, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:02 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x200000000000000}}, {0x28}}}}, 0x3f0) 00:20:02 executing program 3: r0 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) syz_genetlink_get_family_id$nl802154(&(0x7f0000000040), r0) (async) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r2 = socket$inet(0x2, 0x3, 0x9) ioctl$BTRFS_IOC_START_SYNC(r2, 0x89e0, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) 00:20:02 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5450, &(0x7f0000001440)) 00:20:02 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x9, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:02 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, 
{{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x400000000000000}}, {0x28}}}}, 0x3f0) 00:20:02 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:02 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:02 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0xa, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:02 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x800000000000000}}, {0x28}}}}, 0x3f0) 00:20:02 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x2, &(0x7f0000001440)) 00:20:02 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:02 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0xb, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:02 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5451, &(0x7f0000001440)) 00:20:02 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:02 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, 
{{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xb802000000000000}}, {0x28}}}}, 0x3f0) 00:20:02 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:02 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0xc, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:02 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:02 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xd001000000000000}}, {0x28}}}}, 0x3f0) 00:20:02 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) 00:20:02 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xe800000000000000}}, {0x28}}}}, 0x3f0) 00:20:02 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0xd, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 
00:20:02 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5452, &(0x7f0000001440)) 00:20:02 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000080), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:20:02 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xff00000000000000}}, {0x28}}}}, 0x3f0) 00:20:02 executing program 1: r0 = openat$loop_ctrl(0xffffffffffffff9c, &(0x7f0000000040), 0x0, 0x0) ioctl$LOOP_CTL_GET_FREE(r0, 0x4c82) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(0xffffffffffffffff, 0x3ba0, &(0x7f0000000480)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000880)) r2 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x5460, 0x0) syz_open_pts(r2, 0x83) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) setsockopt$ARPT_SO_SET_REPLACE(r2, 0x0, 0x60, &(0x7f0000000040)={'filter\x00', 0x7, 0x4, 0x3e0, 0x0, 0x0, 0x1f8, 0x2f8, 0x2f8, 0x2f8, 0x4, &(0x7f0000000000), {[{{@uncond, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0x91}}}, {{@arp={@empty, @local, 0xff000000, 0xffffff00, 0x4, 0xb, {@mac=@link_local, {[0xff, 0x0, 0x0, 0xff, 0xff]}}, {@mac=@multicast, {[0x0, 0xff, 0x0, 0x0, 0x0, 0xff]}}, 0x64b, 0x401, 0x1, 0x1, 0x7, 0x4, 'veth0_macvtap\x00', 'virt_wifi0\x00', {0xff}, {}, 0x0, 0x44}, 0xc0, 0x110}, @mangle={0x50, 'mangle\x00', 0x0, {@mac=@dev={'\xaa\xaa\xaa\xaa\xaa', 0x32}, @empty, @broadcast, @dev={0xac, 0x14, 0x14, 0x1a}, 0x0, 0xffffffff}}}, {{@uncond, 0xc0, 0x100}, @unspec=@RATEEST={0x40, 'RATEEST\x00', 0x0, {'syz1\x00', 0x0, 0x8c, {0x7fff}}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x430) r4 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000500), 0x40000, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x3b88, &(0x7f0000000540)={0xc, 0x0}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES(r2, 0x3ba0, &(0x7f0000000580)={0x48, 0x7, r1, 0x0, 0x1, 0x0, 0x9, 0xfff}) ioctl$IOMMU_TEST_OP_DESTROY_ACCESS_PAGES(r4, 0x3ba0, &(0x7f0000000600)={0x48, 0x6, r5, 0x0, r6}) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000001440)) 00:20:02 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0xe, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:03 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5460, &(0x7f0000001440)) 00:20:03 executing program 0: r0 = 
openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) getsockopt$PNPIPE_ENCAP(r0, 0x113, 0x1, &(0x7f0000000000), &(0x7f0000000040)=0x4) 00:20:03 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0xff03000000000000}}, {0x28}}}}, 0x3f0) 00:20:03 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x10, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:03 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x3, &(0x7f0000001440)) 00:20:03 executing program 2: r0 = openat$loop_ctrl(0xffffffffffffff9c, &(0x7f0000000040), 0x0, 0x0) ioctl$LOOP_CTL_GET_FREE(r0, 0x4c82) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(0xffffffffffffffff, 0x3ba0, &(0x7f0000000480)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000880)) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000880)) r2 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x5460, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x5460, 0x0) syz_open_pts(r2, 0x83) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) setsockopt$ARPT_SO_SET_REPLACE(r2, 0x0, 0x60, &(0x7f0000000040)={'filter\x00', 0x7, 0x4, 0x3e0, 0x0, 0x0, 0x1f8, 0x2f8, 0x2f8, 0x2f8, 0x4, &(0x7f0000000000), {[{{@uncond, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28, 'CLASSIFY\x00', 0x0, {0x91}}}, {{@arp={@empty, @local, 0xff000000, 0xffffff00, 0x4, 0xb, {@mac=@link_local, {[0xff, 0x0, 0x0, 0xff, 0xff]}}, {@mac=@multicast, {[0x0, 0xff, 0x0, 0x0, 0x0, 0xff]}}, 0x64b, 0x401, 0x1, 0x1, 0x7, 0x4, 'veth0_macvtap\x00', 'virt_wifi0\x00', {0xff}, {}, 0x0, 0x44}, 0xc0, 0x110}, @mangle={0x50, 'mangle\x00', 0x0, {@mac=@dev={'\xaa\xaa\xaa\xaa\xaa', 0x32}, @empty, @broadcast, @dev={0xac, 0x14, 0x14, 0x1a}, 0x0, 0xffffffff}}}, {{@uncond, 0xc0, 0x100}, @unspec=@RATEEST={0x40, 'RATEEST\x00', 0x0, {'syz1\x00', 0x0, 0x8c, {0x7fff}}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x430) r4 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000500), 0x40000, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x3b88, &(0x7f0000000540)={0xc, 0x0}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES(r2, 0x3ba0, &(0x7f0000000580)={0x48, 0x7, r1, 0x0, 0x1, 0x0, 0x9, 0xfff}) (async) ioctl$IOMMU_TEST_OP_ACCESS_PAGES(r2, 0x3ba0, &(0x7f0000000580)={0x48, 0x7, r1, 0x0, 0x1, 0x0, 0x9, 0xfff}) ioctl$IOMMU_TEST_OP_DESTROY_ACCESS_PAGES(r4, 0x3ba0, &(0x7f0000000600)={0x48, 0x6, r5, 0x0, r6}) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, 
&(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000001440)) 00:20:03 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5c00, &(0x7f0000001440)) 00:20:03 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0x2}}, {0x28}}}}, 0x3f0) 00:20:03 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) getsockopt$PNPIPE_ENCAP(r0, 0x113, 0x1, &(0x7f0000000000), &(0x7f0000000040)=0x4) 00:20:03 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x11, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:03 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000080), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:20:03 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x6054, &(0x7f0000001440)) 00:20:03 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080), &(0x7f00000000c0)=[0x0], 0x4, 0x6, 0x0, 0x1}) 00:20:03 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0x4}}, {0x28}}}}, 0x3f0) 00:20:03 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, 
@multicast2, 0x0, 0x0, 0x0, 0x12, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:03 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) 00:20:03 executing program 1: r0 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0], 0x1, 0xf235955baba59ed3, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000140)={r1}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000001240)) r2 = ioctl$LOOP_CTL_GET_FREE(0xffffffffffffffff, 0x4c82) ioctl$LOOP_CTL_REMOVE(r0, 0x4c81, r2) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0], 0x4, 0x2, 0x5, 0x1}) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) 00:20:03 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0x8}}, {0x28}}}}, 0x3f0) 00:20:03 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x8933, &(0x7f0000001440)) 00:20:03 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x18, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:03 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080), &(0x7f00000000c0)=[0x0], 0x4, 0x6, 0x0, 0x1}) 00:20:03 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) (async) 00:20:03 executing program 3: r0 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0], 0x1, 0xf235955baba59ed3, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000140)={r1}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000001240)) r2 = 
ioctl$LOOP_CTL_GET_FREE(0xffffffffffffffff, 0x4c82) ioctl$LOOP_CTL_REMOVE(r0, 0x4c81, r2) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0], 0x4, 0x2, 0x5, 0x1}) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0], 0x1, 0xf235955baba59ed3}) (async) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000140)={r1}) (async) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000001240)) (async) ioctl$LOOP_CTL_GET_FREE(0xffffffffffffffff, 0x4c82) (async) ioctl$LOOP_CTL_REMOVE(r0, 0x4c81, r2) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0], 0x4, 0x2, 0x5, 0x1}) (async) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) 00:20:03 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xe8}}, {0x28}}}}, 0x3f0) 00:20:03 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x25, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:03 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1000000, &(0x7f0000001440)) 00:20:03 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000040)) 00:20:03 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0x1d0}}, {0x28}}}}, 0x3f0) 00:20:03 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 
0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x48, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:03 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:03 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1630880, &(0x7f0000001440)) [ 1201.579708][T32351] workqueue: Failed to create a rescuer kthread for wq "nfc21_nci_cmd_wq": -EINTR [ 1203.594830][ T342] xt_check_table_hooks: 42 callbacks suppressed [ 1203.594848][ T342] x_tables: duplicate underflow at hook 1 00:20:03 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0x2b8}}, {0x28}}}}, 0x3f0) 00:20:03 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x4c, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:03 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:03 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000040)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000040)) (async) [ 1203.658723][ T351] x_tables: duplicate underflow at hook 1 00:20:03 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) 00:20:03 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, 
'\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x68, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:03 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xb802}}, {0x28}}}}, 0x3f0) [ 1203.745854][ T373] x_tables: duplicate underflow at hook 1 00:20:03 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1660880, &(0x7f0000001440)) 00:20:03 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:03 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x6c, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:03 executing program 4: r0 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x5451, 0x0) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(0xffffffffffffffff, 0xc02064b9, &(0x7f0000000180)={&(0x7f0000000000)=[0x0, 0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], 0x2, 0x0, 0xdededede}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(0xffffffffffffffff, 0xc02064b9, &(0x7f0000000240)={&(0x7f00000001c0)=[0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x2, 0x0, 0xfbfbfbfb}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(0xffffffffffffffff, 0xc02064b9, &(0x7f0000000300)={&(0x7f0000000280)=[0x0, 0x0], &(0x7f00000002c0)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x2, 0x0, 0xeeeeeeee}) r4 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r4, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r4, 0xc02064b9, &(0x7f0000001200)={0x0, 0x0, 0x0, r5}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000380)={&(0x7f0000000340)=[0x0, r1, r2, r3, 0x0, r5], 0x6, 0x80000}) r6 = accept$phonet_pipe(0xffffffffffffffff, &(0x7f0000000100), &(0x7f0000000040)=0xb) getsockopt$PNPIPE_INITSTATE(r6, 0x113, 0x4, &(0x7f0000000080), &(0x7f00000000c0)=0x4) r7 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r7, 0x0, &(0x7f0000001440)) [ 1203.834777][ T393] x_tables: duplicate underflow at hook 1 00:20:03 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 
0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xd001}}, {0x28}}}}, 0x3f0) 00:20:03 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000100)) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) 00:20:03 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x74, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:03 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x2000000, &(0x7f0000001440)) [ 1203.921955][ T404] x_tables: duplicate underflow at hook 1 00:20:03 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xe800}}, {0x28}}}}, 0x3f0) 00:20:03 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:03 executing program 3: r0 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x5451, 0x0) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(0xffffffffffffffff, 0xc02064b9, &(0x7f0000000180)={&(0x7f0000000000)=[0x0, 0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], 0x2, 0x0, 0xdededede}) (async, rerun: 32) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(0xffffffffffffffff, 0xc02064b9, &(0x7f0000000240)={&(0x7f00000001c0)=[0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x2, 0x0, 0xfbfbfbfb}) (async, rerun: 32) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(0xffffffffffffffff, 0xc02064b9, &(0x7f0000000300)={&(0x7f0000000280)=[0x0, 0x0], &(0x7f00000002c0)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x2, 0x0, 0xeeeeeeee}) r4 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r4, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) 
ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r4, 0xc02064b9, &(0x7f0000001200)={0x0, 0x0, 0x0, r5}) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000380)={&(0x7f0000000340)=[0x0, r1, r2, r3, 0x0, r5], 0x6, 0x80000}) (async, rerun: 64) r6 = accept$phonet_pipe(0xffffffffffffffff, &(0x7f0000000100), &(0x7f0000000040)=0xb) (rerun: 64) getsockopt$PNPIPE_INITSTATE(r6, 0x113, 0x4, &(0x7f0000000080), &(0x7f00000000c0)=0x4) r7 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r7, 0x0, &(0x7f0000001440)) 00:20:03 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x2660840, &(0x7f0000001440)) 00:20:03 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x7a, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) [ 1204.041159][ T441] x_tables: duplicate underflow at hook 1 00:20:04 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) (async, rerun: 32) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000100)) (async, rerun: 32) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) 00:20:04 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xff00}}, {0x28}}}}, 0x3f0) 00:20:04 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x4, &(0x7f0000001440)) 00:20:04 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0xe8, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:04 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x3000000, &(0x7f0000001440)) 00:20:04 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 
0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xff03}}, {0x28}}}}, 0x3f0) [ 1204.130671][ T454] x_tables: duplicate underflow at hook 1 00:20:04 executing program 3: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000000), 0x480, 0x0) r1 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r1, 0xc05064a7, &(0x7f00000006c0)={&(0x7f0000000500), 0x0, &(0x7f0000000640)=[0x0], &(0x7f0000000680), 0x0, 0x1, 0x0, 0x0, r2}) ioctl$DRM_IOCTL_MODE_SETPROPERTY(r1, 0xc01064ab, &(0x7f0000000740)={0x0, r3, r2}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x3, 0x5, 0x9, 0x6}) ioctl$DRM_IOCTL_MODE_GET_LEASE(0xffffffffffffffff, 0xc01064c8, &(0x7f00000001c0)={0x8, 0x0, &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]}) r6 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r6, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r6, 0xc05064a7, &(0x7f00000003c0)={0x0, 0x0, &(0x7f0000000540)=[0x0], 0x0, 0x0, 0x1, 0x0, 0x0, r7}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000240)={&(0x7f0000000200)=[r2, r4, r5, 0x0, r7], 0x5}) r8 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r8, 0x0, &(0x7f0000001440)) 00:20:04 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:04 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) (async) 00:20:04 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) 
ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x4000000, &(0x7f0000001440)) [ 1204.196477][ T469] x_tables: duplicate underflow at hook 1 00:20:04 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) setsockopt$inet_mtu(0xffffffffffffffff, 0x0, 0xa, &(0x7f0000000040)=0x2, 0x4) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:04 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x2]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) [ 1204.243514][ T480] x_tables: duplicate underflow at hook 1 00:20:04 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0x1000000}}, {0x28}}}}, 0x3f0) 00:20:04 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x3]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:04 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) setsockopt$inet_mtu(0xffffffffffffffff, 0x0, 0xa, &(0x7f0000000040)=0x2, 0x4) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) setsockopt$inet_mtu(0xffffffffffffffff, 0x0, 0xa, &(0x7f0000000040)=0x2, 0x4) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) (async) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000080)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) 00:20:04 executing program 3: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000000), 0x480, 0x0) r1 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r1, 0xc05064a7, &(0x7f00000006c0)={&(0x7f0000000500), 0x0, &(0x7f0000000640)=[0x0], 
&(0x7f0000000680), 0x0, 0x1, 0x0, 0x0, r2}) ioctl$DRM_IOCTL_MODE_SETPROPERTY(r1, 0xc01064ab, &(0x7f0000000740)={0x0, r3, r2}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x3, 0x5, 0x9, 0x6}) ioctl$DRM_IOCTL_MODE_GET_LEASE(0xffffffffffffffff, 0xc01064c8, &(0x7f00000001c0)={0x8, 0x0, &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]}) r6 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r6, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r6, 0xc05064a7, &(0x7f00000003c0)={0x0, 0x0, &(0x7f0000000540)=[0x0], 0x0, 0x0, 0x1, 0x0, 0x0, r7}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000240)={&(0x7f0000000200)=[r2, r4, r5, 0x0, r7], 0x5}) r8 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r8, 0x0, &(0x7f0000001440)) 00:20:04 executing program 4: openat$drirender128(0xffffffffffffff9c, &(0x7f0000000000), 0x480, 0x0) (async) r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000000), 0x480, 0x0) r1 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r1, 0xc05064a7, &(0x7f00000006c0)={&(0x7f0000000500), 0x0, &(0x7f0000000640)=[0x0], &(0x7f0000000680), 0x0, 0x1, 0x0, 0x0, r2}) ioctl$DRM_IOCTL_MODE_SETPROPERTY(r1, 0xc01064ab, &(0x7f0000000740)={0x0, r3, r2}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x3, 0x5, 0x9, 0x6}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x3, 0x5, 0x9, 0x6}) ioctl$DRM_IOCTL_MODE_GET_LEASE(0xffffffffffffffff, 0xc01064c8, &(0x7f00000001c0)={0x8, 0x0, &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]}) syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) (async) r6 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r6, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r6, 0xc05064a7, &(0x7f00000003c0)={0x0, 0x0, &(0x7f0000000540)=[0x0], 0x0, 0x0, 0x1, 0x0, 0x0, r7}) (async) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r6, 0xc05064a7, &(0x7f00000003c0)={0x0, 0x0, &(0x7f0000000540)=[0x0], 0x0, 0x0, 0x1, 0x0, 0x0, r7}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000240)={&(0x7f0000000200)=[r2, r4, r5, 0x0, r7], 0x5}) r8 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r8, 0x0, &(0x7f0000001440)) [ 1204.357173][ T506] x_tables: duplicate underflow at hook 1 00:20:04 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, 
@unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0x2000000}}, {0x28}}}}, 0x3f0) 00:20:04 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x4]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:04 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:04 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5000000, &(0x7f0000001440)) 00:20:04 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:20:04 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x5]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:04 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0x4000000}}, {0x28}}}}, 0x3f0) 00:20:04 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) ioctl$sock_inet_SIOCADDRT(r0, 0x8941, &(0x7f0000001100)={0x0, {0x2, 0x0, @broadcast}, {0x2, 0x0, @initdev={0xac, 0x1e, 0x0, 0x0}}, {0x2, 0x0, @multicast1}}) ioctl$sock_inet_SIOCADDRT(r0, 0x890b, &(0x7f00000000c0)={0x0, {0x2, 0x4e23, @remote}, {0x2, 0x4e20, @broadcast}, {0x2, 0x4e24, @multicast2}, 0x324, 0x0, 0x0, 0x0, 0x8, &(0x7f0000000080)='erspan0\x00', 0x2, 0x709, 0xffc0}) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000040)) 00:20:04 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) 
ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) 00:20:04 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0x8000000}}, {0x28}}}}, 0x3f0) 00:20:04 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x6]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:04 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x6000000, &(0x7f0000001440)) 00:20:04 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) (async) 00:20:04 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xb8020000}}, {0x28}}}}, 0x3f0) 00:20:04 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x7]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:04 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) ioctl$sock_inet_SIOCADDRT(r0, 0x8941, &(0x7f0000001100)={0x0, {0x2, 0x0, @broadcast}, {0x2, 0x0, @initdev={0xac, 0x1e, 0x0, 0x0}}, {0x2, 0x0, @multicast1}}) ioctl$sock_inet_SIOCADDRT(r0, 0x890b, &(0x7f00000000c0)={0x0, {0x2, 0x4e23, 
@remote}, {0x2, 0x4e20, @broadcast}, {0x2, 0x4e24, @multicast2}, 0x324, 0x0, 0x0, 0x0, 0x8, &(0x7f0000000080)='erspan0\x00', 0x2, 0x709, 0xffc0}) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000040)) 00:20:04 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) ioctl$sock_inet_SIOCADDRT(r0, 0x8941, &(0x7f0000001100)={0x0, {0x2, 0x0, @broadcast}, {0x2, 0x0, @initdev={0xac, 0x1e, 0x0, 0x0}}, {0x2, 0x0, @multicast1}}) ioctl$sock_inet_SIOCADDRT(r0, 0x890b, &(0x7f00000000c0)={0x0, {0x2, 0x4e23, @remote}, {0x2, 0x4e20, @broadcast}, {0x2, 0x4e24, @multicast2}, 0x324, 0x0, 0x0, 0x0, 0x8, &(0x7f0000000080)='erspan0\x00', 0x2, 0x709, 0xffc0}) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000040)) 00:20:04 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xd0010000}}, {0x28}}}}, 0x3f0) 00:20:04 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x8]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:04 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r1, 0x0, 0x19, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:04 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x9]}}, 
{@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:04 executing program 1: ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000001440)) 00:20:04 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xe8000000}}, {0x28}}}}, 0x3f0) 00:20:04 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) ioctl$sock_inet_SIOCADDRT(r0, 0x8941, &(0x7f0000001100)={0x0, {0x2, 0x0, @broadcast}, {0x2, 0x0, @initdev={0xac, 0x1e, 0x0, 0x0}}, {0x2, 0x0, @multicast1}}) ioctl$sock_inet_SIOCADDRT(r0, 0x890b, &(0x7f00000000c0)={0x0, {0x2, 0x4e23, @remote}, {0x2, 0x4e20, @broadcast}, {0x2, 0x4e24, @multicast2}, 0x324, 0x0, 0x0, 0x0, 0x8, &(0x7f0000000080)='erspan0\x00', 0x2, 0x709, 0xffc0}) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000040)) 00:20:04 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0xa]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:04 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x7000000, &(0x7f0000001440)) 00:20:04 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r1 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r1, 0x0, 0x19, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) (async, rerun: 64) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000000)) (async, rerun: 64) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:04 executing program 2: ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000001440)) 00:20:04 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, 
@unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0xb]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:04 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xff000000}}, {0x28}}}}, 0x3f0) 00:20:04 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = accept$phonet_pipe(r0, &(0x7f0000000000), &(0x7f0000000040)=0x10) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000080)) 00:20:04 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0xc]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:04 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xff030000}}, {0x28}}}}, 0x3f0) 00:20:04 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) ioctl$sock_inet_SIOCADDRT(r0, 0x8941, &(0x7f0000001100)={0x0, {0x2, 0x0, @broadcast}, {0x2, 0x0, @initdev={0xac, 0x1e, 0x0, 0x0}}, {0x2, 0x0, @multicast1}}) (async) ioctl$sock_inet_SIOCADDRT(r0, 0x890b, &(0x7f00000000c0)={0x0, {0x2, 0x4e23, @remote}, {0x2, 0x4e20, @broadcast}, {0x2, 0x4e24, @multicast2}, 0x324, 0x0, 0x0, 0x0, 0x8, &(0x7f0000000080)='erspan0\x00', 0x2, 0x709, 0xffc0}) (async) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000040)) 00:20:05 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5, &(0x7f0000001440)) 00:20:05 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 
0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0x100000000000000}}, {0x28}}}}, 0x3f0) 00:20:05 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x8000000, &(0x7f0000001440)) 00:20:05 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) r1 = accept$phonet_pipe(r0, &(0x7f0000000000), &(0x7f0000000040)=0x10) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000080)) 00:20:05 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0xd]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:05 executing program 1: ioctl$DRM_IOCTL_MODE_GETGAMMA(0xffffffffffffffff, 0xc02064a4, &(0x7f00000000c0)={0x0, 0x0, &(0x7f0000000000), &(0x7f0000000040)=[0x2, 0x4, 0x6], &(0x7f0000000080)=[0x0, 0x4]}) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) r1 = openat$zero(0xffffffffffffff9c, &(0x7f0000000000), 0xd00, 0x0) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(r0, 0x3ba0, &(0x7f0000000100)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f00000001c0)={0x48, 0x8, r2, 0x0, 0x8, 0x208e86, 0xc, &(0x7f0000000180)="93858646fce75b50be8e1132", 0x10006}) 00:20:05 executing program 0: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:05 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0x200000000000000}}, {0x28}}}}, 0x3f0) 00:20:05 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x9000000, &(0x7f0000001440)) 00:20:05 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x2, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000), &(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0], 0x0, 0xa, 0x3, 0x2}) ioctl$BTRFS_IOC_START_SYNC(r1, 
0x80089418, &(0x7f0000000140)) 00:20:05 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0xe]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:05 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:05 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x6, &(0x7f0000001440)) 00:20:05 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x10]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:05 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0x400000000000000}}, {0x28}}}}, 0x3f0) 00:20:05 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x9940440, &(0x7f0000001440)) 00:20:05 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x2, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x2, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000), &(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0], 0x0, 0xa, 0x3, 0x2}) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000140)) (async) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000140)) 00:20:05 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xa000000, &(0x7f0000001440)) 00:20:05 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, 
@unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0x800000000000000}}, {0x28}}}}, 0x3f0) 00:20:05 executing program 0: ioctl$DRM_IOCTL_MODE_GETGAMMA(0xffffffffffffffff, 0xc02064a4, &(0x7f00000000c0)={0x0, 0x0, &(0x7f0000000000), &(0x7f0000000040)=[0x2, 0x4, 0x6], &(0x7f0000000080)=[0x0, 0x4]}) (async, rerun: 64) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) (rerun: 64) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) (async) r1 = openat$zero(0xffffffffffffff9c, &(0x7f0000000000), 0xd00, 0x0) (async, rerun: 32) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(r0, 0x3ba0, &(0x7f0000000100)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff}) (rerun: 32) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f00000001c0)={0x48, 0x8, r2, 0x0, 0x8, 0x208e86, 0xc, &(0x7f0000000180)="93858646fce75b50be8e1132", 0x10006}) 00:20:05 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x11]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:05 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:05 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x7, &(0x7f0000001440)) 00:20:05 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x12]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:05 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xb802000000000000}}, {0x28}}}}, 0x3f0) 00:20:05 executing program 0: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x55b4b64b, 0x80000) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000080)={&(0x7f0000000040)=[0x0], 0x1}) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, 
&(0x7f0000001440)) 00:20:05 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xb000000, &(0x7f0000001440)) 00:20:05 executing program 2: r0 = openat$loop_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x80280, 0x0) ioctl$LOOP_CTL_ADD(r0, 0x4c80, 0xc) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, 0x0) 00:20:05 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x2, 0x0) 00:20:05 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x18]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:05 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xd001000000000000}}, {0x28}}}}, 0x3f0) 00:20:05 executing program 3: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x55b4b64b, 0x80000) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000080)={&(0x7f0000000040)=[0x0], 0x1}) (async) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) 00:20:05 executing program 2: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f00000000c0), 0x0, 0x0) mmap$usbfs(&(0x7f0000ffa000/0x3000)=nil, 0x3000, 0x0, 0x10, r0, 0x0) r1 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) r2 = syz_genetlink_get_family_id$nl802154(&(0x7f00000002c0), 0xffffffffffffffff) sendmsg$NL802154_CMD_SET_MAX_FRAME_RETRIES(r1, &(0x7f0000000380)={&(0x7f0000000280), 0xc, &(0x7f0000000340)={&(0x7f0000000300)={0x2c, r2, 0x1, 0x0, 0x0, {}, [@NL802154_ATTR_WPAN_DEV={0xc}, @NL802154_ATTR_WPAN_DEV={0xc}]}, 0x2c}}, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0], 0x6, 0x2, 0x5, 0x1}) ioctl$DRM_IOCTL_MODE_GET_LEASE(r0, 0xc01064c8, &(0x7f0000000180)={0x7, 0x0, &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]}) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:05 executing program 0: r0 = openat$loop_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x80280, 0x0) ioctl$LOOP_CTL_ADD(r0, 0x4c80, 0xc) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, 0x0) 00:20:05 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x8, 
&(0x7f0000001440)) 00:20:05 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xe800000000000000}}, {0x28}}}}, 0x3f0) 00:20:05 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:05 executing program 0: gettid() r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) r1 = syz_open_dev$dri(&(0x7f0000000080), 0x800, 0x400) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f00000001c0)={&(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x5, 0x1, 0x6, 0x6}) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:05 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xb6620c0, &(0x7f0000001440)) 00:20:05 executing program 4: r0 = openat$loop_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x80280, 0x0) ioctl$LOOP_CTL_ADD(r0, 0x4c80, 0xc) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, 0x0) 00:20:05 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x48]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:05 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xff00000000000000}}, {0x28}}}}, 0x3f0) 00:20:05 executing 
program 1: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f00000000c0), 0x0, 0x0) mmap$usbfs(&(0x7f0000ffa000/0x3000)=nil, 0x3000, 0x0, 0x10, r0, 0x0) (async) mmap$usbfs(&(0x7f0000ffa000/0x3000)=nil, 0x3000, 0x0, 0x10, r0, 0x0) syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) (async) r1 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) syz_genetlink_get_family_id$nl802154(&(0x7f00000002c0), 0xffffffffffffffff) (async) r2 = syz_genetlink_get_family_id$nl802154(&(0x7f00000002c0), 0xffffffffffffffff) sendmsg$NL802154_CMD_SET_MAX_FRAME_RETRIES(r1, &(0x7f0000000380)={&(0x7f0000000280), 0xc, &(0x7f0000000340)={&(0x7f0000000300)={0x2c, r2, 0x1, 0x0, 0x0, {}, [@NL802154_ATTR_WPAN_DEV={0xc}, @NL802154_ATTR_WPAN_DEV={0xc}]}, 0x2c}}, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0], 0x6, 0x2, 0x5, 0x1}) ioctl$DRM_IOCTL_MODE_GET_LEASE(r0, 0xc01064c8, &(0x7f0000000180)={0x7, 0x0, &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]}) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:05 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8, 0x0, {0x0, 0xff03000000000000}}, {0x28}}}}, 0x3f0) 00:20:05 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x4c]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:05 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc000000, &(0x7f0000001440)) 00:20:05 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x9, &(0x7f0000001440)) 00:20:05 executing program 4: r0 = openat$loop_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x80280, 0x0) ioctl$LOOP_CTL_ADD(r0, 0x4c80, 0xc) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, 0x0) openat$loop_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x80280, 0x0) (async) ioctl$LOOP_CTL_ADD(r0, 0x4c80, 0xc) (async) openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, 0x0) (async) 00:20:06 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, 
{{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:06 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:06 executing program 4: gettid() (async, rerun: 64) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (rerun: 64) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) (async) r1 = syz_open_dev$dri(&(0x7f0000000080), 0x800, 0x400) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f00000001c0)={&(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x5, 0x1, 0x6, 0x6}) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:06 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000000)) 00:20:06 executing program 0: ioctl$BTRFS_IOC_START_SYNC(0xffffffffffffffff, 0x80089418, &(0x7f0000000000)) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x10000, 0x0) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000100), 0x400000, 0x0) r2 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) accept4$phonet_pipe(r2, 0x0, 0x0, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f0000000180)={0x48, 0x8, r2, 0x0, 0x9, 0x1111d0, 0x2b, &(0x7f0000000140)="41dac8ab74bc0fac5d974ed94cec6f0c240990a8b5d82c6d21d8b51b8fe6db7eb2432e6a6f5e33b633d86e", 0x5}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000080)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x3, 0xa3d98, 0x3c, &(0x7f0000000040)="ca655a2a99ec152d9fde4ecd503912d378fe266354c10bd28f5969ee7fe05ea7dbac8421cd050c8ad18f53886b520dd4fa54ed56439caf27cd6f24c6", 0x10000}) 00:20:06 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xd000000, &(0x7f0000001440)) 00:20:06 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28, '\x00', 0x2}}}}, 0x3f0) 00:20:06 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = 
socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x6c]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:06 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28, '\x00', 0x4}}}}, 0x3f0) 00:20:06 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xa, &(0x7f0000001440)) 00:20:06 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xd942040, &(0x7f0000001440)) 00:20:06 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x74]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:06 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000000)) 00:20:06 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28, '\x00', 0x8}}}}, 0x3f0) 00:20:06 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x7a]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, 
{0x28}}}}, 0x3f0) 00:20:06 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28, '\x00', 0xe8}}}}, 0x3f0) 00:20:06 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xb, &(0x7f0000001440)) 00:20:06 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xe000000, &(0x7f0000001440)) 00:20:06 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0xe8]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:06 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:06 executing program 0: ioctl$BTRFS_IOC_START_SYNC(0xffffffffffffffff, 0x80089418, &(0x7f0000000000)) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x10000, 0x0) openat$iommufd(0xffffffffffffff9c, &(0x7f0000000100), 0x400000, 0x0) (async) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000100), 0x400000, 0x0) r2 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) accept4$phonet_pipe(r2, 0x0, 0x0, 0x0) (async) accept4$phonet_pipe(r2, 0x0, 0x0, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f0000000180)={0x48, 0x8, r2, 0x0, 0x9, 0x1111d0, 0x2b, &(0x7f0000000140)="41dac8ab74bc0fac5d974ed94cec6f0c240990a8b5d82c6d21d8b51b8fe6db7eb2432e6a6f5e33b633d86e", 0x5}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000080)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x3, 0xa3d98, 0x3c, &(0x7f0000000040)="ca655a2a99ec152d9fde4ecd503912d378fe266354c10bd28f5969ee7fe05ea7dbac8421cd050c8ad18f53886b520dd4fa54ed56439caf27cd6f24c6", 0x10000}) 00:20:06 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:06 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xb, 
&(0x7f0000001440)) 00:20:06 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc, &(0x7f0000001440)) 00:20:06 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x10, 0x0) 00:20:06 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x10000000, &(0x7f0000001440)) 00:20:06 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x2]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:06 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:06 executing program 0: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE_FD(r0, 0xc01064c2, &(0x7f0000000100)) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000180), 0x109002, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f0000000100)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x8, 0x2bf693, 0xd9, &(0x7f0000000000)="f36248929de839a336830a9abd4b80f4b1df9a0880bd8cb7ed962c31adb6b06ee224648f2264df072251c1b0d227c42001a8b04ac451f2c6f7194f17d0f6d88c8257a605529cb386b2be681f3a3d3920f0189c1930f73287baf6b3f21655b3d38c8cae8806cf90a4d5ba0adeee38bda2bda31c2564597f26a082875510b3ff9c1b2b3fd22d12aaa05c955e112c51594df877d8fe353a012d2253c103b559624c4ccb2d033ecf2519677201a7eadfd7064086bc01198df6683977652075254a11179de80621b6770894c7af0afa5b448976d0a3343ccbff9d1a", 0x5}) 00:20:06 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xd, &(0x7f0000001440)) 00:20:06 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x3]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:06 executing program 0: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x541b, 0x0) 00:20:06 executing program 2: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000040), 0x6800, 0x0) r1 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x149841, 0x0) r2 = ioctl$LOOP_CTL_ADD(r1, 0x4c80, 0xa) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000000200)={&(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0], 0x4, 0x80000, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r1, 0xc00464c9, &(0x7f0000000240)={r3}) 
ioctl$LOOP_CTL_ADD(r1, 0x4c80, r2) r4 = socket$inet(0x2, 0x3, 0x9) ioctl$sock_inet_SIOCADDRT(r4, 0x890b, &(0x7f0000001100)={0x0, {0x2, 0x0, @broadcast}, {0x2, 0x0, @initdev={0xac, 0x1e, 0x0, 0x0}}, {0x2, 0x0, @multicast1}}) ioctl$sock_bt_bnep_BNEPCONNADD(r1, 0x400442c8, &(0x7f00000000c0)={r4, 0x9, 0x6, "f44e057d3ceb0afec836ba07dc459c4551bd1d5c789c75a1e60db2810dff58cea04f4d1331c8db04a1cb02acc69a2a6a385bd57c9f"}) setsockopt$PNPIPE_INITSTATE(r0, 0x113, 0x4, &(0x7f0000000080)=0x1, 0x4) syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$sock_inet_SIOCADDRT(r4, 0x890b, &(0x7f0000000140)={0x0, {0x2, 0x4e22, @local}, {0x2, 0x4e21, @empty}, {0x2, 0x4e21, @loopback}, 0x4, 0x0, 0x0, 0x0, 0xfff, &(0x7f0000000100)='gretap0\x00', 0x7fff, 0xaa, 0x3ff}) 00:20:06 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x11000000, &(0x7f0000001440)) 00:20:07 executing program 4: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000040), 0x6800, 0x0) r1 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x149841, 0x0) r2 = ioctl$LOOP_CTL_ADD(r1, 0x4c80, 0xa) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000000200)={&(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0], 0x4, 0x80000, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r1, 0xc00464c9, &(0x7f0000000240)={r3}) ioctl$LOOP_CTL_ADD(r1, 0x4c80, r2) r4 = socket$inet(0x2, 0x3, 0x9) ioctl$sock_inet_SIOCADDRT(r4, 0x890b, &(0x7f0000001100)={0x0, {0x2, 0x0, @broadcast}, {0x2, 0x0, @initdev={0xac, 0x1e, 0x0, 0x0}}, {0x2, 0x0, @multicast1}}) ioctl$sock_bt_bnep_BNEPCONNADD(r1, 0x400442c8, &(0x7f00000000c0)={r4, 0x9, 0x6, "f44e057d3ceb0afec836ba07dc459c4551bd1d5c789c75a1e60db2810dff58cea04f4d1331c8db04a1cb02acc69a2a6a385bd57c9f"}) setsockopt$PNPIPE_INITSTATE(r0, 0x113, 0x4, &(0x7f0000000080)=0x1, 0x4) syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$sock_inet_SIOCADDRT(r4, 0x890b, &(0x7f0000000140)={0x0, {0x2, 0x4e22, @local}, {0x2, 0x4e21, @empty}, {0x2, 0x4e21, @loopback}, 0x4, 0x0, 0x0, 0x0, 0xfff, &(0x7f0000000100)='gretap0\x00', 0x7fff, 0xaa, 0x3ff}) 00:20:07 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x4]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:07 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:07 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xe, &(0x7f0000001440)) 00:20:07 executing program 1: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000040), 0x6800, 0x0) r1 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x149841, 0x0) r2 = ioctl$LOOP_CTL_ADD(r1, 0x4c80, 0xa) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000000200)={&(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0], 0x4, 0x80000, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r1, 0xc00464c9, &(0x7f0000000240)={r3}) ioctl$LOOP_CTL_ADD(r1, 0x4c80, r2) r4 = socket$inet(0x2, 0x3, 
0x9) ioctl$sock_inet_SIOCADDRT(r4, 0x890b, &(0x7f0000001100)={0x0, {0x2, 0x0, @broadcast}, {0x2, 0x0, @initdev={0xac, 0x1e, 0x0, 0x0}}, {0x2, 0x0, @multicast1}}) ioctl$sock_bt_bnep_BNEPCONNADD(r1, 0x400442c8, &(0x7f00000000c0)={r4, 0x9, 0x6, "f44e057d3ceb0afec836ba07dc459c4551bd1d5c789c75a1e60db2810dff58cea04f4d1331c8db04a1cb02acc69a2a6a385bd57c9f"}) setsockopt$PNPIPE_INITSTATE(r0, 0x113, 0x4, &(0x7f0000000080)=0x1, 0x4) syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$sock_inet_SIOCADDRT(r4, 0x890b, &(0x7f0000000140)={0x0, {0x2, 0x4e22, @local}, {0x2, 0x4e21, @empty}, {0x2, 0x4e21, @loopback}, 0x4, 0x0, 0x0, 0x0, 0xfff, &(0x7f0000000100)='gretap0\x00', 0x7fff, 0xaa, 0x3ff}) 00:20:07 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = socket$l2tp(0x2, 0x2, 0x73) setsockopt$inet_mtu(r1, 0x0, 0xa, &(0x7f0000000000)=0x3, 0x4) 00:20:07 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x5]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:07 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x12000000, &(0x7f0000001440)) 00:20:07 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:07 executing program 2: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE_FD(r0, 0xc01064c2, &(0x7f0000000100)) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000180), 0x109002, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, 0x0) (async) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f0000000100)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x8, 0x2bf693, 0xd9, &(0x7f0000000000)="f36248929de839a336830a9abd4b80f4b1df9a0880bd8cb7ed962c31adb6b06ee224648f2264df072251c1b0d227c42001a8b04ac451f2c6f7194f17d0f6d88c8257a605529cb386b2be681f3a3d3920f0189c1930f73287baf6b3f21655b3d38c8cae8806cf90a4d5ba0adeee38bda2bda31c2564597f26a082875510b3ff9c1b2b3fd22d12aaa05c955e112c51594df877d8fe353a012d2253c103b559624c4ccb2d033ecf2519677201a7eadfd7064086bc01198df6683977652075254a11179de80621b6770894c7af0afa5b448976d0a3343ccbff9d1a", 0x5}) 00:20:07 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xe, &(0x7f0000001440)) 00:20:07 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = socket$l2tp(0x2, 0x2, 0x73) setsockopt$inet_mtu(r1, 0x0, 0xa, &(0x7f0000000000)=0x3, 0x4) (async) setsockopt$inet_mtu(r1, 0x0, 0xa, &(0x7f0000000000)=0x3, 0x4) 00:20:07 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 
0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x6]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:07 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x10, &(0x7f0000001440)) 00:20:07 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x18000000, &(0x7f0000001440)) 00:20:08 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f00000000c0)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000100)) 00:20:08 executing program 4: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000000), 0x50000, 0x0) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(0xffffffffffffffff, 0xc02064b9, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0], 0x8, 0x0, 0xb0b0b0b0}) r2 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_CURSOR2(r2, 0xc02464bb, &(0x7f00000001c0)) ioctl$DRM_IOCTL_MODE_GETENCODER(r0, 0xc01464a6, &(0x7f00000001c0)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_CURSOR2(r2, 0xc02464bb, &(0x7f0000000200)={0x1, r3, 0x309d4802, 0x1, 0x2d12, 0x4, 0x10000, 0x7, 0x81}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000180)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0], 0x7, r1, 0xb0b0b0b0}) r4 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r4, 0x3b88, 0x0) 00:20:08 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:08 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x7]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:08 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x11, &(0x7f0000001440)) 00:20:08 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1b540000, &(0x7f0000001440)) 00:20:08 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, 
@unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x8]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:08 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:08 executing program 1: openat$drirender128(0xffffffffffffff9c, &(0x7f0000000000), 0x50000, 0x0) (async) r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000000), 0x50000, 0x0) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(0xffffffffffffffff, 0xc02064b9, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0], 0x8, 0x0, 0xb0b0b0b0}) r2 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_CURSOR2(r2, 0xc02464bb, &(0x7f00000001c0)) ioctl$DRM_IOCTL_MODE_GETENCODER(r0, 0xc01464a6, &(0x7f00000001c0)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_CURSOR2(r2, 0xc02464bb, &(0x7f0000000200)={0x1, r3, 0x309d4802, 0x1, 0x2d12, 0x4, 0x10000, 0x7, 0x81}) (async) ioctl$DRM_IOCTL_MODE_CURSOR2(r2, 0xc02464bb, &(0x7f0000000200)={0x1, r3, 0x309d4802, 0x1, 0x2d12, 0x4, 0x10000, 0x7, 0x81}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000180)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0], 0x7, r1, 0xb0b0b0b0}) openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) (async) r4 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r4, 0x3b88, 0x0) 00:20:08 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x12, &(0x7f0000001440)) 00:20:08 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f00000000c0)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) (async) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000040)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000100)) 00:20:08 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x9]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:08 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1f581c80, &(0x7f0000001440)) 00:20:08 executing program 4: r0 = socket$nl_generic(0x10, 0x3, 0x10) getsockopt$PNPIPE_IFINDEX(0xffffffffffffffff, 0x113, 0x2, &(0x7f0000000040)=0x0, &(0x7f0000000080)=0x4) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(0xffffffffffffffff, 0x89f2, 
&(0x7f0000000140)={'ip6tnl0\x00', &(0x7f00000000c0)={'ip6tnl0\x00', 0x0, 0x2f, 0x3, 0x20, 0x4, 0x42, @empty, @initdev={0xfe, 0x88, '\x00', 0x1, 0x0}, 0x8, 0x700, 0x7, 0x3}}) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(0xffffffffffffffff, 0x89f2, &(0x7f0000000200)={'syztnl2\x00', &(0x7f0000000180)={'ip6_vti0\x00', 0x0, 0x2b, 0xbe, 0x1, 0x101, 0x60, @private1={0xfc, 0x1, '\x00', 0x1}, @private0={0xfc, 0x0, '\x00', 0x1}, 0x1, 0x8, 0x0, 0x7}}) getsockopt$PNPIPE_IFINDEX(0xffffffffffffffff, 0x113, 0x2, &(0x7f0000000240)=0x0, &(0x7f0000000280)=0x4) r5 = syz_genetlink_get_family_id$ethtool(&(0x7f0000000900), r0) getsockopt$PNPIPE_IFINDEX(0xffffffffffffffff, 0x113, 0x2, &(0x7f0000000940)=0x0, &(0x7f0000000980)=0x4) sendmsg$ETHTOOL_MSG_EEE_GET(r0, &(0x7f0000000a80)={&(0x7f00000008c0)={0x10, 0x0, 0x0, 0x40000000}, 0xc, &(0x7f0000000a40)={&(0x7f00000009c0)=ANY=[@ANYBLOB="80000000", @ANYRES16=r5, @ANYBLOB="080029bd7000fedbdf2517000000280001801400020070696d36726567000000000000000000080003000200000008000300010000003000018008000100", @ANYRES32=r2, @ANYBLOB="08000300020000000800030001000000140002006261746164765f736c6176655f3000001400018008000100", @ANYRES32=r4, @ANYBLOB="fd010100", @ANYRES32=r6, @ANYBLOB], 0x80}, 0x1, 0x0, 0x0, 0x40040}, 0x8004) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(0xffffffffffffffff, 0x89f2, &(0x7f0000000340)={'ip6_vti0\x00', &(0x7f00000002c0)={'ip6tnl0\x00', 0x0, 0x2f, 0x20, 0xff, 0x7, 0x42, @rand_addr=' \x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01', @ipv4={'\x00', '\xff\xff', @rand_addr=0x64010102}, 0x80, 0x700, 0x10000, 0x1}}) getsockopt$PNPIPE_IFINDEX(0xffffffffffffffff, 0x113, 0x2, &(0x7f00000003c0)=0x0, &(0x7f0000000400)=0x4) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(0xffffffffffffffff, 0x89f0, &(0x7f00000004c0)={'ip6tnl0\x00', &(0x7f0000000440)={'syztnl2\x00', 0x0, 0x4, 0xf7, 0x4, 0x3, 0x29, @mcast1, @remote, 0x40, 0x700, 0x5fd, 0x4}}) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(0xffffffffffffffff, 0x89f2, &(0x7f0000000580)={'syztnl2\x00', &(0x7f0000000500)={'syztnl0\x00', 0x0, 0x29, 0x8, 0x2, 0x1, 0x0, @dev={0xfe, 0x80, '\x00', 0x17}, @remote, 0x40, 0x10, 0x800, 0xc808}}) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(0xffffffffffffffff, 0x89f0, &(0x7f0000000640)={'ip6gre0\x00', &(0x7f00000005c0)={'syztnl1\x00', 0x0, 0x4, 0x6, 0x7, 0x6, 0x2, @empty, @empty, 0x7, 0x1, 0x1c000, 0x800}}) sendmsg$ETHTOOL_MSG_EEE_GET(r0, &(0x7f0000000880)={&(0x7f0000000000)={0x10, 0x0, 0x0, 0x20000}, 0xc, &(0x7f0000000840)={&(0x7f0000000680)=ANY=[@ANYBLOB="a8010000", @ANYRES16=0x0, @ANYBLOB="00082bbd7000fbdbdf251700000054000180080003000100000008000100", @ANYRES32=r1, @ANYBLOB="08000100", @ANYRES32=r2, @ANYBLOB="08000100", @ANYRES32=r3, @ANYBLOB="08000100", @ANYRES32=r4, @ANYBLOB="1400020000000000000000001400020073797a5f74756e000000000000000000180001801400020000657468305f746f5f6261746164760048000180140002006272696467655f736c6176655f310000140002006e696376663000000000000000000000140002006970766c616e3000000000000000000000010000004000018008000100000000", @ANYRES32=r7, @ANYBLOB="080003000100000008000100", @ANYRES32=r8, @ANYRES64=r3, @ANYRES32=r9, @ANYBLOB="6172c32403000000140002006e72300000000400000000000000000008000100", @ANYRES32=r10, @ANYBLOB="08000100", @ANYRES32=r11, @ANYBLOB="1400020067656e65766531000000000000000000140002006272696467655f736c6176655f3000003800018008000300030000000800030002000000080003ee010000001400020076657468315f746f5f626174616476000800030002000000"], 0x1a8}, 0x1, 0x0, 0x0, 0x80}, 0x2002c884) r12 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r12, 
0x3b88, 0x0) [ 1204.701651][ T575] workqueue: Failed to create a rescuer kthread for wq "nfc4_nci_cmd_wq": -EINTR [ 1208.340449][ T6190] Bluetooth: hci3: command 0x0406 tx timeout 00:20:08 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x5421, 0x0) 00:20:08 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0xa]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:08 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x18, &(0x7f0000001440)) 00:20:08 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:08 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)=0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) r2 = syz_open_dev$dri(&(0x7f00000001c0), 0xffffffffffffffff, 0x40000) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000300)={&(0x7f0000000200)=[0x0, 0x0], &(0x7f0000000240)=[0x0], &(0x7f0000000280)=[0x0, 0x0], &(0x7f00000002c0)=[0x0, 0x0], 0x2, 0x1, 0x2, 0x2}) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)=0x0) sendmsg$NFC_CMD_DISABLE_SE(0xffffffffffffffff, &(0x7f0000000180)={&(0x7f0000000080)={0x10, 0x0, 0x0, 0x2}, 0xc, &(0x7f0000000140)={&(0x7f0000000100)={0x2c, 0x0, 0x20, 0x70bd26, 0x25dfdbff, {}, [@NFC_ATTR_SE_INDEX={0x8, 0x15, 0x2}, @NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r1}, @NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r3}]}, 0x2c}, 0x1, 0x0, 0x0, 0x14}, 0x20000000) 00:20:08 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x20581c40, &(0x7f0000001440)) 00:20:08 executing program 1: r0 = socket$nl_generic(0x10, 0x3, 0x10) (async) getsockopt$PNPIPE_IFINDEX(0xffffffffffffffff, 0x113, 0x2, &(0x7f0000000040)=0x0, &(0x7f0000000080)=0x4) (async) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(0xffffffffffffffff, 0x89f2, &(0x7f0000000140)={'ip6tnl0\x00', &(0x7f00000000c0)={'ip6tnl0\x00', 0x0, 0x2f, 0x3, 0x20, 0x4, 0x42, @empty, @initdev={0xfe, 0x88, '\x00', 0x1, 0x0}, 0x8, 0x700, 0x7, 0x3}}) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(0xffffffffffffffff, 0x89f2, &(0x7f0000000200)={'syztnl2\x00', &(0x7f0000000180)={'ip6_vti0\x00', 0x0, 0x2b, 0xbe, 0x1, 0x101, 0x60, @private1={0xfc, 0x1, '\x00', 0x1}, @private0={0xfc, 0x0, '\x00', 0x1}, 0x1, 0x8, 0x0, 0x7}}) getsockopt$PNPIPE_IFINDEX(0xffffffffffffffff, 0x113, 0x2, &(0x7f0000000240)=0x0, &(0x7f0000000280)=0x4) (async) r5 = syz_genetlink_get_family_id$ethtool(&(0x7f0000000900), r0) (async) getsockopt$PNPIPE_IFINDEX(0xffffffffffffffff, 0x113, 0x2, &(0x7f0000000940)=0x0, &(0x7f0000000980)=0x4) sendmsg$ETHTOOL_MSG_EEE_GET(r0, &(0x7f0000000a80)={&(0x7f00000008c0)={0x10, 0x0, 0x0, 0x40000000}, 0xc, 
&(0x7f0000000a40)={&(0x7f00000009c0)=ANY=[@ANYBLOB="80000000", @ANYRES16=r5, @ANYBLOB="080029bd7000fedbdf2517000000280001801400020070696d36726567000000000000000000080003000200000008000300010000003000018008000100", @ANYRES32=r2, @ANYBLOB="08000300020000000800030001000000140002006261746164765f736c6176655f3000001400018008000100", @ANYRES32=r4, @ANYBLOB="fd010100", @ANYRES32=r6, @ANYBLOB], 0x80}, 0x1, 0x0, 0x0, 0x40040}, 0x8004) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(0xffffffffffffffff, 0x89f2, &(0x7f0000000340)={'ip6_vti0\x00', &(0x7f00000002c0)={'ip6tnl0\x00', 0x0, 0x2f, 0x20, 0xff, 0x7, 0x42, @rand_addr=' \x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01', @ipv4={'\x00', '\xff\xff', @rand_addr=0x64010102}, 0x80, 0x700, 0x10000, 0x1}}) getsockopt$PNPIPE_IFINDEX(0xffffffffffffffff, 0x113, 0x2, &(0x7f00000003c0)=0x0, &(0x7f0000000400)=0x4) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(0xffffffffffffffff, 0x89f0, &(0x7f00000004c0)={'ip6tnl0\x00', &(0x7f0000000440)={'syztnl2\x00', 0x0, 0x4, 0xf7, 0x4, 0x3, 0x29, @mcast1, @remote, 0x40, 0x700, 0x5fd, 0x4}}) (async) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(0xffffffffffffffff, 0x89f2, &(0x7f0000000580)={'syztnl2\x00', &(0x7f0000000500)={'syztnl0\x00', 0x0, 0x29, 0x8, 0x2, 0x1, 0x0, @dev={0xfe, 0x80, '\x00', 0x17}, @remote, 0x40, 0x10, 0x800, 0xc808}}) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(0xffffffffffffffff, 0x89f0, &(0x7f0000000640)={'ip6gre0\x00', &(0x7f00000005c0)={'syztnl1\x00', 0x0, 0x4, 0x6, 0x7, 0x6, 0x2, @empty, @empty, 0x7, 0x1, 0x1c000, 0x800}}) sendmsg$ETHTOOL_MSG_EEE_GET(r0, &(0x7f0000000880)={&(0x7f0000000000)={0x10, 0x0, 0x0, 0x20000}, 0xc, &(0x7f0000000840)={&(0x7f0000000680)=ANY=[@ANYBLOB="a8010000", @ANYRES16=0x0, @ANYBLOB="00082bbd7000fbdbdf251700000054000180080003000100000008000100", @ANYRES32=r1, @ANYBLOB="08000100", @ANYRES32=r2, @ANYBLOB="08000100", @ANYRES32=r3, @ANYBLOB="08000100", @ANYRES32=r4, @ANYBLOB="1400020000000000000000001400020073797a5f74756e000000000000000000180001801400020000657468305f746f5f6261746164760048000180140002006272696467655f736c6176655f310000140002006e696376663000000000000000000000140002006970766c616e3000000000000000000000010000004000018008000100000000", @ANYRES32=r7, @ANYBLOB="080003000100000008000100", @ANYRES32=r8, @ANYRES64=r3, @ANYRES32=r9, @ANYBLOB="6172c32403000000140002006e72300000000400000000000000000008000100", @ANYRES32=r10, @ANYBLOB="08000100", @ANYRES32=r11, @ANYBLOB="1400020067656e65766531000000000000000000140002006272696467655f736c6176655f3000003800018008000300030000000800030002000000080003ee010000001400020076657468315f746f5f626174616476000800030002000000"], 0x1a8}, 0x1, 0x0, 0x0, 0x80}, 0x2002c884) (async) r12 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r12, 0x3b88, 0x0) 00:20:08 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0xb]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:08 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) 
ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x5450, 0x0) 00:20:08 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:08 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x25, &(0x7f0000001440)) 00:20:08 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)=0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) r2 = syz_open_dev$dri(&(0x7f00000001c0), 0xffffffffffffffff, 0x40000) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000300)={&(0x7f0000000200)=[0x0, 0x0], &(0x7f0000000240)=[0x0], &(0x7f0000000280)=[0x0, 0x0], &(0x7f00000002c0)=[0x0, 0x0], 0x2, 0x1, 0x2, 0x2}) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)=0x0) sendmsg$NFC_CMD_DISABLE_SE(0xffffffffffffffff, &(0x7f0000000180)={&(0x7f0000000080)={0x10, 0x0, 0x0, 0x2}, 0xc, &(0x7f0000000140)={&(0x7f0000000100)={0x2c, 0x0, 0x20, 0x70bd26, 0x25dfdbff, {}, [@NFC_ATTR_SE_INDEX={0x8, 0x15, 0x2}, @NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r1}, @NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r3}]}, 0x2c}, 0x1, 0x0, 0x0, 0x14}, 0x20000000) [ 1208.354478][ T1227] workqueue: Failed to create a rescuer kthread for wq "nfc3_nci_cmd_wq": -EINTR [ 1208.605680][ T1272] xt_check_table_hooks: 33 callbacks suppressed [ 1208.605696][ T1272] x_tables: duplicate underflow at hook 1 00:20:08 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x21540000, &(0x7f0000001440)) 00:20:08 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0xc]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:08 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) ioctl$LOOP_CTL_ADD(r1, 0x5460, 0xa) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f00000000c0)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(0xffffffffffffffff, 0x89f0, &(0x7f0000000080)={'syztnl2\x00', &(0x7f0000000000)={'syztnl0\x00', 0x0, 0x4, 0x6, 0x50, 0x7fd, 0x50, @ipv4={'\x00', '\xff\xff', @multicast2}, @loopback, 0x1, 0x7, 0x80000001, 0x2}}) 00:20:08 executing program 0: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:08 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x25000000, &(0x7f0000001440)) [ 1208.755329][ T1297] x_tables: duplicate underflow at hook 1 00:20:08 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5c, &(0x7f0000001440)) 00:20:08 executing 
program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:08 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0xd]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:08 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r1 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) ioctl$LOOP_CTL_ADD(r1, 0x5460, 0xa) (async, rerun: 64) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f00000000c0)) (rerun: 64) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(0xffffffffffffffff, 0x89f0, &(0x7f0000000080)={'syztnl2\x00', &(0x7f0000000000)={'syztnl0\x00', 0x0, 0x4, 0x6, 0x50, 0x7fd, 0x50, @ipv4={'\x00', '\xff\xff', @multicast2}, @loopback, 0x1, 0x7, 0x80000001, 0x2}}) 00:20:08 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) [ 1208.900548][ T1323] x_tables: duplicate underflow at hook 1 00:20:08 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x33890000, &(0x7f0000001440)) 00:20:08 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x300, &(0x7f0000001440)) 00:20:08 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:08 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0xe]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:08 executing program 0: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x31d044, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f00000001c0)={0xc, 0x0}) ioctl$IOMMU_TEST_OP_ACCESS_REPLACE_IOAS(r0, 0x3ba0, &(0x7f0000000200)={0x48, 0xb, 0xffffffffffffffff, 0x0, r1}) r2 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000100)={0x0, &(0x7f0000000180)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r2, 0xc01864c6, &(0x7f0000000080)={&(0x7f0000000040)=[r3, 0x0], 0x2}) r4 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r4, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) 
ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r4, 0xc05064a7, &(0x7f00000003c0)={0x0, 0x0, 0x0, 0x0, 0x0, 0xfffffffffffffe4c, 0x0, 0x0, r5}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(0xffffffffffffffff, 0xc01064b5, &(0x7f0000000040)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0], 0x4}) ioctl$DRM_IOCTL_MODE_GETENCODER(0xffffffffffffffff, 0xc01464a6, &(0x7f0000000080)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r2, 0xc01864c6, &(0x7f0000000100)={&(0x7f00000000c0)=[r5, 0x0, r6, r7], 0x4, 0x80800}) [ 1209.016640][ T1341] x_tables: duplicate underflow at hook 1 00:20:09 executing program 4: ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000000080)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x5, 0x800, 0x0, 0xffffffffffffffff}) r1 = ioctl$LOOP_CTL_GET_FREE(0xffffffffffffffff, 0x4c82) ioctl$LOOP_CTL_ADD(r0, 0x4c80, r1) r2 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f0000001440)) 00:20:09 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x5451, 0x0) 00:20:09 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x10]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:09 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x369418c0, &(0x7f0000001440)) 00:20:09 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x500, &(0x7f0000001440)) [ 1209.129439][ T1357] x_tables: duplicate underflow at hook 1 00:20:09 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) setsockopt$inet_msfilter(0xffffffffffffffff, 0x0, 0x29, &(0x7f0000000000)={@dev={0xac, 0x14, 0x14, 0x30}, @multicast2, 0x0, 0x9, [@initdev={0xac, 0x1e, 0x7f, 0x0}, @loopback, @local, @multicast2, @multicast2, @rand_addr=0x64010102, @rand_addr=0x64010101, @empty, @initdev={0xac, 0x1e, 0x0, 0x0}]}, 0x34) 00:20:09 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x31d044, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f00000001c0)={0xc, 0x0}) ioctl$IOMMU_TEST_OP_ACCESS_REPLACE_IOAS(r0, 0x3ba0, &(0x7f0000000200)={0x48, 0xb, 0xffffffffffffffff, 0x0, r1}) r2 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000100)={0x0, &(0x7f0000000180)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r2, 0xc01864c6, &(0x7f0000000080)={&(0x7f0000000040)=[r3, 0x0], 0x2}) r4 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r4, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r4, 0xc05064a7, &(0x7f00000003c0)={0x0, 0x0, 0x0, 0x0, 0x0, 0xfffffffffffffe4c, 0x0, 0x0, r5}) 
ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(0xffffffffffffffff, 0xc01064b5, &(0x7f0000000040)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0], 0x4}) ioctl$DRM_IOCTL_MODE_GETENCODER(0xffffffffffffffff, 0xc01464a6, &(0x7f0000000080)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r2, 0xc01864c6, &(0x7f0000000100)={&(0x7f00000000c0)=[r5, 0x0, r6, r7], 0x4, 0x80800}) openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x31d044, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f00000001c0)={0xc}) (async) ioctl$IOMMU_TEST_OP_ACCESS_REPLACE_IOAS(r0, 0x3ba0, &(0x7f0000000200)={0x48, 0xb, 0xffffffffffffffff, 0x0, r1}) (async) syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000100)={0x0, &(0x7f0000000180)=[0x0], 0x0, 0x0, 0x0, 0x1}) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r2, 0xc01864c6, &(0x7f0000000080)={&(0x7f0000000040)=[r3, 0x0], 0x2}) (async) syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r4, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) (async) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r4, 0xc05064a7, &(0x7f00000003c0)={0x0, 0x0, 0x0, 0x0, 0x0, 0xfffffffffffffe4c, 0x0, 0x0, r5}) (async) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(0xffffffffffffffff, 0xc01064b5, &(0x7f0000000040)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0], 0x4}) (async) ioctl$DRM_IOCTL_MODE_GETENCODER(0xffffffffffffffff, 0xc01464a6, &(0x7f0000000080)) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r2, 0xc01864c6, &(0x7f0000000100)={&(0x7f00000000c0)=[r5, 0x0, r6, r7], 0x4, 0x80800}) (async) 00:20:09 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x11]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:09 executing program 0: ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000000080)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x5, 0x800, 0x0, 0xffffffffffffffff}) (async) r1 = ioctl$LOOP_CTL_GET_FREE(0xffffffffffffffff, 0x4c82) ioctl$LOOP_CTL_ADD(r0, 0x4c80, r1) (async) r2 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f0000001440)) 00:20:09 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x400448c9, &(0x7f0000001440)) [ 1209.225113][ T1383] x_tables: duplicate underflow at hook 1 00:20:09 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x600, &(0x7f0000001440)) 00:20:09 executing program 1: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x402180, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:09 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x5452, 0x0) 00:20:09 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 
0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x12]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:09 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = accept4$phonet_pipe(0xffffffffffffffff, &(0x7f0000000000), &(0x7f0000000040)=0x10, 0x80000) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f0000000080), &(0x7f00000000c0)=0x4) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:09 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x400448dd, &(0x7f0000001440)) 00:20:09 executing program 1: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) setsockopt$inet_msfilter(0xffffffffffffffff, 0x0, 0x29, &(0x7f0000000000)={@dev={0xac, 0x14, 0x14, 0x30}, @multicast2, 0x0, 0x9, [@initdev={0xac, 0x1e, 0x7f, 0x0}, @loopback, @local, @multicast2, @multicast2, @rand_addr=0x64010102, @rand_addr=0x64010101, @empty, @initdev={0xac, 0x1e, 0x0, 0x0}]}, 0x34) [ 1209.375215][ T1406] x_tables: duplicate underflow at hook 1 00:20:09 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x402180, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:09 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x700, &(0x7f0000001440)) 00:20:09 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x12]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) [ 1209.493464][ T1424] x_tables: duplicate underflow at hook 1 00:20:09 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x18]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:09 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r1 = accept4$phonet_pipe(0xffffffffffffffff, &(0x7f0000000000), &(0x7f0000000040)=0x10, 0x80000) 
getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f0000000080), &(0x7f00000000c0)=0x4) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:09 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x400454ca, &(0x7f0000001440)) 00:20:09 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x900, &(0x7f0000001440)) 00:20:09 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x250780, 0x0) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000240), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000040)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) r2 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000000080), 0x240000, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f00000001c0)={0x48, 0x8, r2, 0x0, 0x1, 0x2e1a23, 0xd2, &(0x7f00000000c0)="7f01bbbb54de9caae5de19d80e1adb03b9db66fe4ed95e3a10dc6a8aa6f30a26f03b12d632fda69a5af0538964852bfff960de540017d53c9030762a34a94f0ff43a9ab85e48d4f866c38555d7a6c1c27f8197fc1412ec01b98360df67ee1bc7bfd96fea38d68e92d96897d50c704a16f931465af36bc7130b486118e41988b440e0dcc39fc50d67ae6c34f9754aa94d554f7e7a9e1419137128c9cd0329831497de14d50b5a5af425ac5b43722c2fa7bf7bd281a083728a2fb9e873899c5b030416a11734e3c0fc3b2c3b36266842d4c46a", 0x1}) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, &(0x7f0000000280)={0xc}) [ 1209.558916][ T1435] x_tables: duplicate underflow at hook 1 00:20:09 executing program 2: r0 = accept$inet(0xffffffffffffffff, &(0x7f0000000040)={0x2, 0x0, @remote}, &(0x7f0000000080)=0x10) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) socket$inet6_udplite(0xa, 0x2, 0x88) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$sock_inet_SIOCADDRT(r0, 0x890b, &(0x7f0000000240)={0x0, {0x2, 0x4e21, @initdev={0xac, 0x1e, 0x1, 0x0}}, {0x2, 0x4e22, @broadcast}, {0x2, 0x4e22, @rand_addr=0x64010100}, 0x8a, 0x0, 0x0, 0x0, 0xd2, &(0x7f0000000200)='veth0_to_team\x00', 0xc319, 0x6, 0x101}) socket$inet(0x2, 0x80000, 0x7) 00:20:09 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:09 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = socket$phonet_pipe(0x23, 0x5, 0x2) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f0000000140)) connect$phonet_pipe(r1, 0x0, 0x0) connect$phonet_pipe(r1, &(0x7f00000006c0), 0x10) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f00000000c0), &(0x7f0000000100)=0x4) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r2 = accept$inet(0xffffffffffffffff, &(0x7f0000000000)={0x2, 0x0, @local}, &(0x7f0000000040)=0x10) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000080)=0x1, 0x4) openat$qrtrtun(0xffffffffffffff9c, &(0x7f0000000380), 0x109080) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000340)) 00:20:09 executing program 1: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x250780, 
0x0) (async) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x250780, 0x0) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000240), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000040)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000000080), 0x240000, 0x0) (async) r2 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000000080), 0x240000, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f00000001c0)={0x48, 0x8, r2, 0x0, 0x1, 0x2e1a23, 0xd2, &(0x7f00000000c0)="7f01bbbb54de9caae5de19d80e1adb03b9db66fe4ed95e3a10dc6a8aa6f30a26f03b12d632fda69a5af0538964852bfff960de540017d53c9030762a34a94f0ff43a9ab85e48d4f866c38555d7a6c1c27f8197fc1412ec01b98360df67ee1bc7bfd96fea38d68e92d96897d50c704a16f931465af36bc7130b486118e41988b440e0dcc39fc50d67ae6c34f9754aa94d554f7e7a9e1419137128c9cd0329831497de14d50b5a5af425ac5b43722c2fa7bf7bd281a083728a2fb9e873899c5b030416a11734e3c0fc3b2c3b36266842d4c46a", 0x1}) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, &(0x7f0000000280)={0xc}) 00:20:09 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xa00, &(0x7f0000001440)) [ 1209.720951][ T1464] x_tables: duplicate underflow at hook 1 00:20:09 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x40049409, &(0x7f0000001440)) 00:20:09 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x48]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:09 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(r0, 0x3ba0, &(0x7f0000000000)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff}) r2 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000200), 0x109000, 0x0) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(r0, 0x3ba0, &(0x7f0000000280)={0x48}) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x3b88, &(0x7f0000000240)={0xc, 0x0}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(0xffffffffffffffff, 0x3ba0, &(0x7f0000000180)={0x48, 0x7, r1, 0x0, 0x10000, 0x0, 0x5, 0x57bb1, 0x2ba0e}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000100)={0x48, 0x8, r1, 0x0, 0xa, 0x180d06, 0x4f, &(0x7f0000000080)="ed3482e3513056f5ffb74f1c037c3cebfc253bab5348911185d921f00254b8b23074b7a630282e1328b59364e330c6336ca6403df91b8576870de67a85961aaaf5f6dbaf25b2091f4165bf20ca2e6f", 0x10000}) ioctl$IOMMU_TEST_OP_ACCESS_REPLACE_IOAS(r0, 0x3ba0, &(0x7f0000000300)={0x48, 0xb, r1, 0x0, r3}) 00:20:09 executing program 3: r0 = accept$inet(0xffffffffffffffff, &(0x7f0000000040)={0x2, 0x0, @remote}, &(0x7f0000000080)=0x10) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) socket$inet6_udplite(0xa, 0x2, 0x88) (async) socket$inet6_udplite(0xa, 0x2, 0x88) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$sock_inet_SIOCADDRT(r0, 0x890b, &(0x7f0000000240)={0x0, {0x2, 0x4e21, @initdev={0xac, 0x1e, 0x1, 0x0}}, {0x2, 0x4e22, 
@broadcast}, {0x2, 0x4e22, @rand_addr=0x64010100}, 0x8a, 0x0, 0x0, 0x0, 0xd2, &(0x7f0000000200)='veth0_to_team\x00', 0xc319, 0x6, 0x101}) socket$inet(0x2, 0x80000, 0x7) (async) socket$inet(0x2, 0x80000, 0x7) 00:20:09 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r1 = socket$phonet_pipe(0x23, 0x5, 0x2) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f0000000140)) (async) connect$phonet_pipe(r1, 0x0, 0x0) (async) connect$phonet_pipe(r1, &(0x7f00000006c0), 0x10) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f00000000c0), &(0x7f0000000100)=0x4) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) r2 = accept$inet(0xffffffffffffffff, &(0x7f0000000000)={0x2, 0x0, @local}, &(0x7f0000000040)=0x10) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000080)=0x1, 0x4) openat$qrtrtun(0xffffffffffffff9c, &(0x7f0000000380), 0x109080) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000340)) 00:20:09 executing program 0: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x5460, 0x0) 00:20:09 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x4c]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:09 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xb00, &(0x7f0000001440)) 00:20:09 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x40086602, &(0x7f0000001440)) 00:20:09 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(r0, 0x3ba0, &(0x7f0000000000)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff}) r2 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000200), 0x109000, 0x0) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(r0, 0x3ba0, &(0x7f0000000280)={0x48}) (async) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(r0, 0x3ba0, &(0x7f0000000280)={0x48}) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x3b88, &(0x7f0000000240)={0xc}) (async) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x3b88, &(0x7f0000000240)={0xc, 0x0}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(0xffffffffffffffff, 0x3ba0, &(0x7f0000000180)={0x48, 0x7, r1, 0x0, 0x10000, 0x0, 0x5, 0x57bb1, 0x2ba0e}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000100)={0x48, 0x8, r1, 0x0, 0xa, 0x180d06, 0x4f, &(0x7f0000000080)="ed3482e3513056f5ffb74f1c037c3cebfc253bab5348911185d921f00254b8b23074b7a630282e1328b59364e330c6336ca6403df91b8576870de67a85961aaaf5f6dbaf25b2091f4165bf20ca2e6f", 0x10000}) (async) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000100)={0x48, 0x8, r1, 0x0, 0xa, 0x180d06, 0x4f, &(0x7f0000000080)="ed3482e3513056f5ffb74f1c037c3cebfc253bab5348911185d921f00254b8b23074b7a630282e1328b59364e330c6336ca6403df91b8576870de67a85961aaaf5f6dbaf25b2091f4165bf20ca2e6f", 0x10000}) ioctl$IOMMU_TEST_OP_ACCESS_REPLACE_IOAS(r0, 0x3ba0, &(0x7f0000000300)={0x48, 0xb, r1, 0x0, r3}) 00:20:10 executing 
program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:10 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:10 executing program 2: geteuid() r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:10 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x8933, 0x0) 00:20:10 executing program 3: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000040), 0x6800, 0x0) (async, rerun: 32) r1 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x149841, 0x0) (rerun: 32) r2 = ioctl$LOOP_CTL_ADD(r1, 0x4c80, 0xa) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000000200)={&(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0], 0x4, 0x80000, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r1, 0xc00464c9, &(0x7f0000000240)={r3}) (async) ioctl$LOOP_CTL_ADD(r1, 0x4c80, r2) r4 = socket$inet(0x2, 0x3, 0x9) ioctl$sock_inet_SIOCADDRT(r4, 0x890b, &(0x7f0000001100)={0x0, {0x2, 0x0, @broadcast}, {0x2, 0x0, @initdev={0xac, 0x1e, 0x0, 0x0}}, {0x2, 0x0, @multicast1}}) (async) ioctl$sock_bt_bnep_BNEPCONNADD(r1, 0x400442c8, &(0x7f00000000c0)={r4, 0x9, 0x6, "f44e057d3ceb0afec836ba07dc459c4551bd1d5c789c75a1e60db2810dff58cea04f4d1331c8db04a1cb02acc69a2a6a385bd57c9f"}) setsockopt$PNPIPE_INITSTATE(r0, 0x113, 0x4, &(0x7f0000000080)=0x1, 0x4) (async, rerun: 32) syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) (async, rerun: 32) ioctl$sock_inet_SIOCADDRT(r4, 0x890b, &(0x7f0000000140)={0x0, {0x2, 0x4e22, @local}, {0x2, 0x4e21, @empty}, {0x2, 0x4e21, @loopback}, 0x4, 0x0, 0x0, 0x0, 0xfff, &(0x7f0000000100)='gretap0\x00', 0x7fff, 0xaa, 0x3ff}) 00:20:10 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000000)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:10 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x40186366, &(0x7f0000001440)) 00:20:10 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x6c]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:10 executing program 4: 
r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc00, &(0x7f0000001440)) 00:20:10 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(0xffffffffffffffff, 0x3b88, &(0x7f0000000b40)={0xc, 0x0}) ioctl$IOMMU_TEST_OP_DESTROY_ACCESS_PAGES(0xffffffffffffffff, 0x3ba0, &(0x7f0000000c00)={0x48, 0x6, r1}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(r0, 0x3ba0, &(0x7f0000000000)={0x48, 0x7, 0xffffffffffffffff, 0x0, 0x10001, 0x0, 0x7, 0x4614a, 0x41b66}) ioctl$IOMMU_TEST_OP_DESTROY_ACCESS_PAGES(r0, 0x3ba0, &(0x7f0000000080)={0x48, 0x6, r1, 0x0, r2}) 00:20:10 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$sock_SIOCGIFINDEX_80211(r1, 0x8933, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000108280)={0x0, 0x0, "689f66414a9e75ee1c96e253d204806aac1cd23471360d8c948ab93ef113c95ac72e74b1a9f642761876a74d0c1d5af75c4a8d5dfd81c5a16f4e1393d26d340efa048766e47e744b3faaf67f179886f580fc32f08b7eafa7d989ed8b83040bf96b549673143952bc605c15188fa11aa0aa433d0ba4b050dc36cdcf1ec1a916962c3e11bcb1ccd3b2025e0a312a0f196de0d663f946b60236f432f86a1c597bf3b3ee462a85814c273f33dbc7ceca3ba7f3dcd7676f96d8285db5da0fdb1ab342ed68c938bc1c14db6c890dda9f9bfda9177dc898f05124719786cf099a67b3edbca9f560be25558ddde1fcf20c8470bca5e6ead62048703e58e1e4ff04478dea", "9775dc7cfd6b20890a680bbbb7e5ef0c2857200c4ec2cb8aaee552e1e45459992bba97e0b38ae38e3f07f78a67e5930556378519a6f9b2f22eaf8a3d938ad12dbd88f40983557f82f8157472c1b5145ee829d37524cd25eb126dac11a17bfb24818c182a9a4f291324b9d145b970af10283c54c4fcea0ed4f81c16a5fe05e8fce9138421b69d0b37819196ba361227752c4a84048a67c757ce1a14bec897f864c2b9c168393fe29647f8ce433167ec3ac8c181690384979163b9893130acee1d8ccaebd6abec5b670ec4d7986c26d0130f67729a27deb6f324927537fb64017be0424edda2c73ec356110822797391aec76430eb10202fbcff4a3084a08ea2ddb8b830e23588feac0a13fa1e9417e7d407d2c2cd57a870200251aab64399e96d2f393478eb1b32b312b27ac0fe32069b11bd83b1853b739fe27c2f8ba7d05fd249446874a062fe5947d4f07a636d16e682fac16cbb972bd406a6d1d6db601c55a8869fc291bbb078bd17ee16bebd4d2f96b79aa4bf2409680537822c0de8e460ba1e7fcbdc73925ce015770d02489346f96ad8269004b19f0aeec6a5b53a9a8d59abbed043355e466e67a0093fc4c9e8f040686841c22c6e1a4b04c7af86fd5f4e02143eca5f99b02105d42adc74eaf9c23c1ac70d78bb7a29b7da6dc5aa378dc4dc3b259c86e9e81e33ace318d2c8fac1964585d1d5c0a9dfb08dfbd668b9bb3348b1582a44fbf78bca85d8787c66094d4e915940ea620e8824c63d0bfce01f9023e69e39e0b64bbf9e4e53cdc062815d805ea60f72eb3b4e7a66687ca72766593612118e663765ac67eceb318dba35f392b4a7cd8147fe5d8f62db266004ae5c5f77bfd7dc20363c126288bccb6c45adc993ee845c465f5e10f9d26b175678e92e984d2fa3b2486ffcc1ea11a1ed3b973f3726c48e65c10b6b520a97d47650cfee745c7b1087a9b47e60875bc99982b72c20e39e692071b41b7941853ae94a74086b02b86bd65027eb91bd385e96269ca2c61028d83391cd37abd32fc570d30d465ed06419af3c0bc0721180ed2064d849e0c58c64ddb80232f2747a10368ab383ff7ec4b46b07a2adf3be63c2a2827db4b2b4dcc2d55fb115fcb685679af3d438253d459b5271d35fbd3363dc0ff08703be0e7df0d4dadd35d549b7ce3c43c4ac5e006e4d35414d579cdfdb0d56eecb89212331be2337afdcec8c7ad36b72ee2374e92fcc2d521258aef6f20d350b801e2dce6747fc2387e6c8a522a5389c7ee1417f09b7105911d8d0f70571b267356ddf1e88a24295c49e8769ba7073367f00b4d789c3e8ad95fce5b677aebaf94a905a30f3a7e48e5085553ce8781f410957efb29f4ee2de88a23237ffd36980020ead2a27c1d4db88aa189ea51dbe51441b6ea383ba3414
a64444ae1bc30b6a3ee56cefd516b8b92ad165274c1a7a790f961923fbebaa4b1fa33c313943f827cb1ceb4141f1228553a675a3446122f08491f27c147672f664853fd82a6f605c30ee0033b501167b28aecd83dd9a6ad4ccc2295a9466d8f20e8a19ed1b7fc8feda093230d52ef714f2db53724794c989377de14c672db6ae0135b56409e1dcb8c1807db5d492dab677d995baf3d22cc52f67bd67af1d93bcc1d63995668427961ddc8850f1f4f35fd377404c9f2cd80c9e487e6e84c007effcb18ab8f774a666c934a78159d1c2463cf8d70b167c5c1658c14f320c14dd21a0b20e110755bdcc0b9c45730ee5ad40ee81759d4f554c53ae9b5cdccdda0a43d84a94293f935956917b423232e97b407ff94fdc829213da59b242e8ff5dbf6c388b59c0f1e59d32a6307ab78c1b5c5f6798e4c26c238e6d2f361fa4ad17424a71e464ad01db87be934aa21a554708402e10586f9d9a426ea38ebf644c653d213c9bbf4e70fdfe421dac88b5be721edb2400ff93fd520a7c81d988f6e4413599c2e699b635e917463ee932be0e3d8d67c3fdeb3acba3d5b0f0e6bdc3502703ad15350cf99703c3b3c9ed753a5783f870f29620f163a2c8ec427bddf69e273641b66f561c59f51ba395c06bb02da7f7b7cbe83d058c62e6948a3d1ed74d8723d49eebefc9194b88dd35858fb77a09376508685730b740f13cae4c1b4f2a1f3ebe19bf797a0f02bd82c18eb9e47e106fcfe260b34e9ab2cad8564553cbf51a96113d64afc78a3c8b5d6bb4e3358fe4eb0ff28dc841381027a68a95e219066ef9e40199d0e395d6a4907bdd61476681f3cbf489820acbc02cccda01ed26d58a5baf8472be7432a90581530f7e957a041e68f3a1c20c68eb770ed937d083a069be3e7e613138d7f679d5996d1bc9e807c6893f3889de1857da9bd8306546358133f2ec03d615a92c00059ff01a72c466a32c5be3b8ea9301d78f76f2480aa0307506fb0b843c702ca0a2f574abb324953c281c09e29db0227e5220770cd2dee356f8ec814d46e7681aa81407f2f369283d9aad34a5e29c4511b745f2ec3fd50dc6f83c42febdb5ded60e19369c0d441e7e11355a0b18a7e70e5cca40fd3ea586ff6c806c19aac34118930dfec7ac9cb760d3091470c643e753e41fbbe6ef496d058cc2009fcf96f6dc3242cf08f1feb4afe86677371f39e129522bd8b92f2170c4cc1588b3c29d2416bd471d331f0f50cbdaa8d44f400645845e3396f77df23a7157f44d62b3f2051e1d3e53ee5d9ffdc825d763239d5c6563a90cb811c8e12a52386da4e68ab9a0166ee57a4bbb8d3dc16202457274b0ec9aa3945f5567d80288dfc71b3129e79244f53cfea8077cbf0b9a07fa9f818e7aa9217b7e6b90f45a2efe0eb6c01787c769bc3b52361b0f6c4a1af6d8bb19eabc190b8fd2ca431a6dd553d0b92d3ce55ff9daa5bb2c44c2cd7eb98f5c3362607dca198af62c3b347a5f34818c76650dc28ba41c36f02ae69c3ecb55be72a50daad6a2d02cd51534c08bffbef8dd8912cbc58969af7261f980d30df51a31561088328a7e5e1f33103d72c8a8dff14622f764eae1e5bd38409140a62cc9d74e7467fd6d039be8293aa662083f589bda7d92a3ef1317cb8761de4a63e5bc8e9bc319aba8b3216cf5cc99e2cabea746b87300f6df876dff7749d5f546c2f0aeb14ae66cf16fd3a02045e8ec46d5c0baa41db66deeb312f4d06cf76a7c808a289a145fa11d2c0c8249703afe8afb383377770850428e96475398bbe36e379794d86f616341eda1986b9d435e371dcded6446ebc2b9fca9448faabd99313788a3c13dbcee70cdedc5cd7537d479c51398a48e001fc08e36a951d071a37b3e642927888c15ac1c2909cf0d3c952b4ca305590e2752ebaac31f3f3d715b393363db7ace23830b7b56ee476fc0790f4ff3d7e634e6964bc01b6c63528f4fa822b3f8982e30532e2ade2362be40fec01346749a908dfa0a6bcb4885e09b7a6c1caf1fdec1d076c3aafa0715f4676a0c63848c2591a04cee78bc126ba97b814841f5c7809a9d5322b7409d21e80ea10bce75b24cd561a4b40ea3f3b07110c3951b2400c1eaa115a184ab90e70bdd4e5fc9e33d1a9f337ddf701b6a2146395fd5ed1ed68fdcdbb25cfaa05f297eeaf350121d224e7985cf48bb385c26a85fa6607d8f419e50e18f561852101d91339c614d68b28b5481b22b6e75f5428d80d1db746494271f38229b3ef9c69eabff6fb232fe7cb1eff14950ff5244183b57568c0307bdf0162832e1bd750739ac13c42bf261a0c0dac8fe28c6a89bb015ed79086b82553764ff002042ed56a1deb1c4d1359819055bbad470641b0809fefd0b547f85eb81456630cb3640e07c456c9a3965fa3831aa8759e0721d5b0096c8745f2695db9cadc32bcb4d97ef23a91b432928323ce978fe8834077479ddb328b5ced5a6c427b3d2cea74ff6cd2d56d31802eeb6fe7d538d9128b85032f1c55541331c5
87e01344bdb3000dd1036ba28031e7c34d74a8fd3ba39cafa17a9fe7430b640c0cf6d34e5763f1c5c59a20f94a67b2137335e54bed042680d237d14fcaf1deb7502c6eaf59fde38f1be657d8b0acd14a7db0c306f7ab032864d5f5a8f6da749c6d7eb12523337f298bde25c9a8f7359f16c03dbd8e12f8b0204b1f74645fbb39622dd7dc938cb168c7510e8ea673e1b0427e43884bf8e57ec3e2712812d02355fbfa1927066f0996deb1645942858f784dffaa99751db8f55801eeda0a5ce9d407507542d92d5376164cbc00fd7c353fe7c0f83440fece7bafb42b3f8c466dd20cb6b19cb2db3132bd6381db2e9c8154a4154b4aedd3a72977caec8d13f35b10d95c04eca503c22bf6627583062a4b8a97d0fa2813a54ec1348b9b23c71441443b472e8e2f6717a671cffb7a07fff2bf30da4cfd89a85e30f0ccab2d32ac19f22f81d00695f55eef9f42e5a625b78002990a2f967d40ff9a212059edc60f8ebeb36b8e7e2cc755191afcb494ee07fc46c145171e29aef6f4fbe664103fa0c029a8fbde46a2efa819c1f96e493a793646522765f2ff4a7400bce0291bac5dba3b02b461c5a1ffd881e8aea2682335f01b210ae93dcc16543de86276d9c202b60b474f522dc790e50968f2c32b2cb982f3d004fcf5356597dd21279b839184346a836e00e836551d93e5bd9f377cafafef7262625ee3b92cf1b34d5352906684e01cc86bae57a137b5313c11a630aa42fe93f3f47a8abc3597ec0fcefcd90893a93452bd3cd4c87ebdc37f1d5f6669a76502d2c717017fee5f345d0286c6f55a6f01ac0779d80e97cb03d639ecbd518779d9f4cab929218a521eaba849b9677d17f2ac4edd598d5ec0ad9cd2ea2fbdc2cc335c68ca11c966f683aae3f7a04214094da4837e821b74bc9f1cefbe77e1c4336f9e47bb5ad9707a419339294b19308672c0d6bfe0e2c98ff714b8e9c87da827fbc5bb5960b5184d57de47994297d26d99d2300c613b3fe00c0e3c1fd0f791cccb850cd030b04105548867378ae7f2f511a7e18071e6de5f3e2fb63207833476d8d6e65ce12bc3d18656e5b217a0059bd98b9968bdfd500ba317a695d81feed2dfb751e0cc00969fb710161c031a7e5a4f0946dda7de225bd49800cc790e387b6a3c96fe2ba3341e71eeaea9a5c44bb42bc47f89e232a29bc2b62821135859fe744cf6f54d8f281f85e0635de0087250d5dee16fbceb4d80b4843eae6404e204e8fcd46272984e835c38c0a9fe60f20e0351da9821397fe8c68f24fbf7a30144d38a921d16d05c65893a4a125a81d3e3c54f94edb0a9b90fbd27496ed0cb2bc990a2675400e08ceab041028ee9473c5b938a43e820b5ce333a1c7325914eeea3a2a0f059bb58303bc6701e7bc091213ac8a9dbafcfd8ca12feb2c1a1a0b13e551c87aae0caf5f79b4f673b529fc1ce473b3a9c885cc9adac7028cf18e817bb2696c2c9557f122"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000109280)={0x0, 0x0, "b09c5b7388b1aee4ee684681290554dddfe0e55a1acf23e899941e7de12d47280913bde67304589c6473f13b4e7b212ac121b7d7a4241f27b83ca3e1ceea76dd0e22789558b12791f5dbbfd691d6bf23a24e4ffa13217ce3c501bb21ae3d2c1876f99b056ffae5a576905b519d3ed7aee6b1e07a00ae418fe2c7a12588f82a43c6ed624b5b97e1fc387ec157cf24b6c6d94706d8bd254575988479a0e40bd7c6a45dc918fe60e1c2d1c71c30274aa0ed362698afed0838eeb4ff5c0ce34c3ff4a1b4ef8660bc86c3511349eb625cc51c9a51e38e3650f602952f68cf9ada9eb09bedcffe057b504a32a97b1d028c5a516f97dc47b5fbcc869a39b71b11f05b15", 
"6f1c068c032f7147590042809255e76b9766a6071d9be2763d1764b36884c8a938d87433e1578b4bfb04a0dfb3dd50010cc9901a6aa60738a479438dbfbc80f7c39744db7a65de0c4f9020f6f312f0abf96d081dbe3f431923be568d59af0b5216f0eb887045dc5346ae69a20d6f3054457d7232d2042dc74ed4c0211609bb310495f96d1265462cde162d32d835963e210ff1644a185d475f43adb918c9a39fa343810b71fbb13cd54d9cefc8b97dbe17742e841377e94e08ceccc565aec6e8eb9e67514a6762f579bbadff14df8764b983520b3f6559a5a8a966408ad578a2ce2447ebcfc7deab1d215f4ea41b7c307751af5c93207397fee84f8f5cc397f11a094764834ea950b592e3b190529d19f4d861846642a2e9ed56e45e11fbf9e52ccfd7cf70bf318c78d3c93367c1dd9c1b400b7576425bc2b6dff15c0f23b22a2a3dc1b7228e20a96085d1063e58aad5998c5da43113f4806228ff26a78f6e6433aa1ced0c2786c5432912336d3994666de613d3210563593473f592ac202c17c749d50f9746f66815caf6eae338c093d529b420254e0928fac88c0a41fbae2b97d337825926d734d93502d05c67189eb2c4c065d073b7d2ea1379b466c255832e1259d4faa3d23aeb47a67bac168f40e99adff7341209877e71e6f1c60141d3c051654e43be26b074cc5f2c19d1db505ff36abc8454417e8f28f825f201e4952fd833712e5a3ca3be49883dc5835c47de2342361882aab53401d1d85244422479b05e24d5798c8d4c4a1b01f862de871b7cc5da659f280126537179cf5591dca2979156a720be78c4433943cca600f9e0260947515b463cbb50e0e72c51ad19f19f1960d4a25a4e80a4a8927c8efb05cadab23988ef9dc3dca7dd7bb736e3cc3ee21af52aac505e5ed452a970fc2139e1586440b59e7e9ae25957b5a7642f842b1a052e86035a58b91bbf7f1d32a6a8999ff0f5c29d3ffbf22fc35c7f67b92f8e9e4c30aaadc6fd8054b5872496169b00a0442bd8286b200fac69bd84674794d6e207f15d7a3fe79eaba3be275c4062e2b2b5b067a10586f3218578f5bc717e93e1fedbb8359016cc2c18c1df70c0fb095e5d46e918a845728b0eedeb8c6b47e546758d9e7b782371e101e6b7daa9fca7cbbc9e8a2e1e3436b28aa6d67105adac636895a1434a452386a116944b86092d52f424761bb7bc6014d3960763981a03278a8c96fbacfbcf9f01703ac96aa2f152237fa2768c48a46503b98e757428d5a2aa22f9ba7b1596a5db443d95d3fe0d49579138856e073e503161cc22274c76a20f87ced648df1fb45f960e5139c81088be36427a84bd81b942660f8c96da52b58cfd1c486282240562319513cd2716a2716d350e06fb7076e761a751addee4d6b0db72cf9181eb06974cf544fa1dbd8a022c4066a33aaf2ab6e5b9d588d73619350ed63ed5dfbcf6f2e1df0e5a24435446cf4e6f087b2a46e17e8335f3ee0d633258eabb52938c1807ba037bcd55e2c43ea9a0a702f69457d3c7e8f9ad694d0d1aff5cb1df930096dea378b2c164a11620ede9e3c2d96e1cac941879e1b5e261c4237eb2be7605e951981963aa773e455ea20f40b1e0fca17754b47cede4525077f9f82e03d892c066fc1c6cfd90a59a7c10c121bba269081faa9ace7175969ea0ffb1f6fd334b7893fd08cc1c077380ea60e21513042f009b8ce7bc6b2b6829e992d99dc80461294bbef50a9161b505ee15edb90b3e2a01801a6778e00c0930989b1a856c65d0c31f585dc98a8f364916657af06973da42bb3eb024c92638c2b9882f9abf5c556faad417607d2176270ef7ac153ac80e7f527249993de9c28f84f17c156c89d0dfcd05159ad0f48f528f0b8055dc6240f5d68b224b0fa337e9b69b24df8ea1a13816fb24285c901c02bd81a16137d3b5860ed460ea315546a0f1d8227afa8e0687dd8b04d875ea133fa7ad3b26f7310214d63d2585ab8fbeb3d2fc1d876422c2fd872e2f5e7895b00eee022817633a8b1d663d6d221aa9f30d301e530b66df280b1eb4f620d89fc78ac2161a68b25e481e0e9a61a0e71f2ffd0c815c8f10e2a33170d556515e5d7d8ba98b0865e56c6a64e3d934693266036028e7d068cfa2e40546beaf3b77ed5660c1d021898dabb50124ec7b8516013246d0e53cc3cf89273ad8ca90eaeddaaef6f9fc02b358401598d5b960ae3bd0d603242b3f0e985fcd71b1e88517c0548b34fdbaff8310ddffb8417557c378b5ca3f2d01da11d7c768d741cbd01d69cc2e3a15f602c7193ecc581dd0d0acff92aa511193c07641f4c9a767eee2bb0f74a98d7040482af624d4e70561c0f8c7824e469792698eba844a09cedab8f2da8d54fc5d72a3207ef97a96c56f28ddc8c5a2cb0e4ab307aa63d06ac1dff02e87bbc013fa1a0116e5d5fbd49f362dd7d6ba008bad40c8d9dfb84799b727fe0b1726648da01cc10d700d831a8625524436b0adf585a1ba0c3e782dd160004632fad801
7f70e9098c0b38aa04b0e18c21b00e8bf65f73c61ebdd82348ff2cb8978c89811ca6b3fa4fc7ff89c3c6c086fb1555049789d399cbb8b9696157cbc4f20ff1d6c51911e44a4dc2614e8b19616623c98423d0183f6ce8e0a01f109f3ee252affee1a5a8f24485b10a2415f2f09943a18fe8c24591c2236eae762c35304ece76b9b948bda9fb3f4f731c19b18720a4d2042e7eed55b5b4c703a2ec1bfb91ec56d5dd5118e746f0fb792094f026c79511760b98815132c120329091e997846de4c8604dafacebdb60e9df3e21c92184cd5bb1ec1d94fba4eb5cd860a40ea2c2eaf9a839e867c00ae1cc1ed7dcada99bd5d93789bda42630330a0a49b583cbd9e6bf9cf7adb81d46ca96940ecd2260dcede5319dc3f56f6b3344c8b8a60264c178984d0c79db797d05f632e360cda0b7bb878d9d02430b316b30fa991c11e45992e1ad2392397f38231e4d3b8c35f7591251feacea7ab3ec32c8a70c3a54d953792946f4f1a2cd189ee1c50c8b2455e384f5cfc7e16bc2cdadece3eea6dcde74f19889e4c5f9e902390783308ce36c1fa47adee488cc415a0dc125049fc84774300602e87e706593b37f451744de7346b79603de2c2abe0e59d2bbec650e8f94c44b8c029b486b718d785061629697161835d9f30e26eddfa40851cd21ddd8962e5accf134488b1f79d2623a219cd5251030913aa276224455eb9bf88fce8736f0d237aa2dde221faacacf7a71924c917cb1cee6aab5638117345773fffca91c9c01e04b9ed9db456608e3fee67ed91fc203468ed553bb32c49ead59e16ce4c61c4827aa5f10b318b586b61feb6148950687674ede9f78f865f47e4d81be2fcf5a91c1e7b57685ec97367889995c04e7de7089b9bb105fa53be7e0f2c423b0de64a19c82c13133c15771b3a375a1f1347889225c7e91b2a36c9b5e4ff47ef25df56993f0c8d31904bb5b4c3e6605ec11f954d61b28eb594633d390e45626312345993babd070b070187de8aeddfe513024ac0ed1935d6cd92004a48592b1640dcfd17e969c09653d53c12202554281aa9a5cd9b946eafc52231bbf7d35b2892cfaee82aa22e8671dbe75808c1e576499adfb6bb8fc60a042513b5bb912f5003059dcfe7d939590c36bf672793d208076784deefde0997ec7a13f68870d8941c3196a3c2ff5965d9cffe2758a078a07baa053d2e54e8640e723c0cf6ca3b63a2e54a27f1507fce6428ff9034c6fe29b1db48c2f84dd25e2b9305fee1f8bfe1d7d802b7c0295358d649b6dca41c065db6c65f23f9fc5cc639ef2510ad0b559fb841625adb496fec9d5c1b9d6ea84ebd705aef3167e6a7cc04bcc636887933f59653a4048c8bdf503f827f9df1c08ab09b51814601d17933d5d5dfc6c8e8abb7243c8ae91d4cb7d5fa6e1a3343169fed00887aeb50662de6372fe78c6c2cb9cb7b2b19d44582aac7db0d26833045ba9f941c2fdcdafd4e1f6c0ef7962e59aaeeb28e439fd060067d22ebaa29cbe4fa90c196632d00ca4807535ccbcc3945616b7aed1443f23456cb9f89a037a8a24e3ae52f4aaa5dfa617dab2889adb05598ec1f88dd7feffc370a6eab1f33a41ffef48b7e0228929ca6a7dfd76e66d024918ff371f95d8cd5eb96a49c1135ed59e641ddf80c6ba52555067520349c7acada42bf7b48d9c9faf79e51825a971f631fdee010c7d748b540ef72ec9400758a9b3ba32966fdbccf5f068578144ed02e3972737d5c4fa04d81c9a3fdf08eca43fb98a6e57b496224e4b5494cf119fe1307cf0ebccea0a10657d21cc4140f328af5b5bc6721b9e7ed97ce21fce4ab72cc7b7aeb00687b86c4383f2b183634c892a8dcc3edafea99e1f84520f3ed7273fe3ebde10d5f30b91636d41f2325f272e6fb3b6dceae4c96d1609bec8f7eebf18d6d8c0520baafd860d41bfdd145a80b03dad10a9407755c7c70d3498280826fa5cbd276ff3f88ffa5f64b3f8260dac4c370020a193dfa01cbf5b2ee8f3e95faa23ca8f77850d92b967dd800c892a9b6c5edf28374abdf5afe8b3ed663579d4ce37b55fcc4b8672ea25ef6108d8f41514ad6c2c43a48b927b883a508628cc8d773a44d06fbfc5f4b9f2d7aa98b6706479010115a86352c68f39658136adaa4aac091a25f06097535ceeb1ea46e6d9e1553b2b0106107a9fa10f2e52e286182b771487600aba899909fa99c8af8337d24a6c201de9f14f507562477c1aaa637f55343442e5523b1bc0315de2b00bab65b2fb562fa6be67c7d60fc31322966690902d40bc3e8a9feb2c4b7a7cbc41bdb08bb58d8a72ef4f0327328af29145475edc5936efd2a70d2629a9bece24128aedc1a848e60ca8f1fe906132926e9ac3cbfbf6431986d464eb449d3491faa5b77db6c0da7960bbc530d82521d61a1003b4bf4f0d311ce2f09c9bc7c3667501107f2d5df9adeca34986365bcbda12fb13361404f5230c662eb55de60537c66c95cfd68f397cef05979353725d216bac144602de1ee0956e1da41
ce0a0b784807456f7513512da7dfd3602d6d4ca0ff057cf13522a2d0d838406f3af065d1c238d70d42acfbaf77a96568ff962f04af47039b5e29458f2adeb532892fc865b239707bf0ebea29a99e95d6a0c85f2b1c1ab8d962feb41fb91c9772d5430905562898f40ec89072f59281ec4a92e994b07f8ea7842633e91f9a8bc47ab243c75f492d088692675cad050ab45aefbce0dfb3acad1ad8758bc00b48362fea224c5cdc1f13bafac1f036a9ed1014e5cfd88e965d0538e812c840bb94b3e1ae4feea7490b3f16d5fb6729337b764a9fb5c8e7e270bdbeb8468a1e53343e69a0ec30020a7606d152496b7eb064b75b815ee2bafcc40f2a01fb102fc9751fa31f36aaa954781a4e206552b53"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f000010a280)={0x0, 0x0, "04056b7c7f7e6dd29915008e2eff8c86e9b104ab19fb3f79bd6e4f4966a3faa7bb58e93ab6f73cbb45747b8c8e133233056d4311d934ae0331ed464c1bf6ccd88ba300c8c130f15276306a865ca613166d8d00a4c74d93c1b8f331185e71de1d82d9425699604c2fe0491d1f8e273ee7d874e704bdac0cc705eb11dca290abd9b861edc528dc2e1623c78a922612c5ba2c5d521f7c6beaa9a356cb1d7bcb183e4599f086e018f31cd40fe17dc80c764332d26d0d285c325d7fb960612378206e196be9b423d2911092e4c4a5182d7c5dc3eba356b02eab6b1bd74be9aa490f94e32c8201cd19806cc72d70761c070d7aae4a07d0b21323371bf0009450364308", "a0875a9ea5a6e7264327cc49d7b63a1a7072a38ad4004f73e69465c6f91bb88794e596ea441150c2c2a6870fa7bc015ac859978dabd9ea905b2d4697b3a85b649547ec0e2c0068f01c20e505bc2f5ab652f2ef3e75d77c24198cb9c337a4de622b7382440d570f822b7a4fbbd28dbc8f24ebf153fa1a3472b01b1583617ee033f6c36047a5b163561d6295426d8445f81cb16ad7bfdf9af5dec3b6672744bb9e3ea8839a44d2c11b744c151620362f1133e91ea9edc491568da205816bd6025bd7349d2602ae2bff0f5cf1851ba9e43d963b7c8e91c41546c2473cf2491fb5275203d032150aecd89945a7d2f9fab3801083f99e8b26126b8a5b1af2820b613c74306986b76fd354aa90acc5c23009586d25effb83d9eacdc151ebcd697f16a72f95095a75da4cc42bfb839175c65cab28b49698e7dc624bf3b51032a5ee3da63cceafc3e00fa677612934339507ece9114649496f1ce8c4e253fd8809c29bd736d060c1d88bd70a5218193face8a9897c26b3fc027ca758e9ce75f6762b77f7461c27ad986e96c100c34e4988cfc2f10d2a72637893f7af71132e6dcd10840eb94da3bd4eef49215185d6cdd5b54460668c3c45533703da79e52a3298fa7986ab93364e8a37e2b0e5b5e0ec5adfbd07198440652ceebf08a1a008ca3d7d96aa65726992b1594fa86dd40cf71ea771d4a4bded7ad66cd50954645ee61792eb3960297616729b07903496d15e2455786f64d163c70670d0347822b01bdf2d19be7c40b6008e679d8fe5eecd499155226c585e4074754c26356e750b1774a2495e784234035f154cd967fbc38ae0eff50e5850f2e7ded0103bc8976d3c3112898c706be4eef057422becaf9dd43e44dc0c49d95b34c227febe9eb8df135b65703e6e1211f70d533f6afe70de1cb377863f51dea9d28782fef1ee465b01f2e4345739f9399e64e08313d05719eb9d28c7d701a98c1894ca1463ad0b490e72b3176fd4e8d1499d62ba2e54c8a1fcc055899e719ce63168bfa992cbfb38426a3a12b3ca79cbe6178f44dd6a37e2754f8375b35bcf4a29724c4e2e6b233c343bda45b07d2858324c988749566a7b7e1c47a41e237fd364db03f1943239183c3427b150a302f766753170df9169a7ccc4c36aaffaf9ce47c4a4ae5fd722e01768467d746f2890567e6225c07a1547041128225ae172e0c0297280d63482ed4595422c6a3fc1d7a956b125bacb755d2f6c3a6bf7b2606767c8db25047dd77c075454eb0ca15234d10f0eeb2aba214954dab81199f204aaf9d3709e321f0fa3830bf01ec20e01a189e22dfa5f1b2379f009decd459e8242a8ba4da56667cddf23ae39fffda8616a64a46cd4667cc74af5f61c3c939587e4d3c645baa3fdcabf1728384cd0ba994a7f9fff80aa551a44c0122d2c1fde3ddd44790e3eb6f199a4b086d4912f01fe8c9819bc6da0b442f111ea803aab3cb96dab6a5072002f809d137896ce4fb25acd5d9bbdb3d74a1325731cf12fcad4e36823543539117f9a34e5495b6dd021648c7fa0144c1b34e41affbfda1b22325da367c2536c7a24d8fb5f97fd6183835889535b3102b56d05e0129a0eb1913ba288dd1927e69edd7c903c5d85f9e8376860f5cf656a0af24b200d5d76c1436be1c7aa1e36435a9fc639bef7231689db27080a0e2049062ee39a837201ff761300e87f5af369c3f7
885fab03936b3eb182713300fd79911505a21e678b4549d703962c8ca22b731e5bdba65b665aaba6a77ff22605afb914dbe6728fae789dae636cd1029ef06ff1b01c37a44684651817db4224fd220f0e0a8542d84dab93d88e1721622d1f1babfd5063c402ef0e940d030b0d247ec6c9d07d5e68023540ec998b361c45507fd2825690ae2ea5fa449fca7fb39912a349620f8469bf0fc9a06c8dfecbef43b83e701955d9058a984a8a90efff499dc2e59f5718a63b1b850356bc64564dc634ee8642143a4c5e54933308fe0e8906770885485500c835f3e106b0249a7dc5422dd3638954dbeb316cfc37450c44f6be70e7cb4be61a7df8fcbce7317c8ff67d881c2830c59f6b3e38eaa93f3180ae343855b8cce66f8b1d9e18ea390e2501f668910cf6725397e69b0cd5a63b78cce59468b0b55f6cff4db050a05262c538cfc9b75ef163ad71879e50ed4ffffdfe0c926ba12515728b23e85bc0968fca638adced2e337bd74b53f443f3bb8d37949ce2f6f3477f0195859679e0c951c8c87b9089e8a596b5d2efd936182a359df0a699174268bddb4e843721e3448df150033ded129658fbef11c82d04a6bb388ff7fa308103d614b1c651b835fa3f714acd4283b3cfaf748bcee0d0b458220a24104814fa5e0669163338fb575d33479b85113c6a92b06e9857586d865139e698f92748d222636ffd1d8f1063fa9979bf1b976c466248489d0050850f42637f446483692ec33e7630a7e0c0c7c366171e8bb0c8921551df58c386fe0eba9e7930f442899f6a6c349702c862b6f3dc7fd26cacce27a7d875f6db74b85aae73accb66b2633bd1f26224848787cf484268c0f594f116a5b881b13da6598ec093095b798f7e0c96bec02022568b707f5eb9aab2f7be9d4eaddaad3b2c156b06cf0082dd5777fc78530f53815346071bc9260ce257b54ee7ae4240679b12640841b0015e0ec22feae3038ec79fe04e67d4db533bf44b934605c52245d455c3431fbe38149ec0e11cd2e4b477686b07d19e7289f7929ea4a88dab635ad3027a0af745ebe4e821c1d452c03839c3e56c52417c9ed769409dac28ee0847a71f006243373ba4baef85ec9e64cea1570dea668f2ab433ebc57d690f9dcbc3dbe8bfd64d6d8f2610f91d92dd87d339c93723a83863bb5ec8a513f345576504fe00a8c338d23e425377190dc1566ef3985cead73e69295482db9401705ea05c6f31c701c9f09df84a4dbdfc15449e505691929ce69500c30a9c757fe955a7bc2f2c31abf72b9e3f1ba773fcbbad285d8ccc68cecc07d04b94b5685f26c496f2d141950718c7329e4df9da645a3a6528c25ba8f4d2a53add4c3df5aac22e56830a929cdc9e68773631e89213489b4bb8bfb41e140be73ac99d866578d2a5d48ead81963d6e8f18220ed5066283dc2f9a7cb64608fc474928d2fab719cf41d10066c88c278bd0749908eb4374c0d9b45affdd1940d48e047e286ebf34612932ae710b650c70b43afc47e52483d226d003bf3245f21ee9f3a1637b8bce0176df2ba74f885d8fd15de2a69ac876634275f41462fa423cf7edd5ae99d961565f57be78a89e4e5682ceacbdb19fd278a005811d37f52e19e77b4dc1dd630fdc6d07254f644b31151689424daa06e1b380586f1fe51f0d2aca974a0d424e4c72b2899d16e77bdb7a976e82afd0865bc24a491e79a18ad76fd8bdc790649f93e50c770b82511c3cfd2d1eca5c58d3b36be351ad0e7a11fa95491b6ff4cf3640eef5d777cd2abbee158ec491409affb987252627d4b59c5f637a716fd85dbdb4a4c443cbade0bf88b9fda657ea02e59b1c5e0871e1b52f6003df0b69e49b314939e31f89afc32039fe0da2a23e098b911915a09b1979dc047efaabcd9aec8eb22f3680a3063a3d3c380acedbc2244fdd8f39afa21113a1c9da2ea8c6224447f0646c8a1c1414c53d7a789ca8c5348678c52400aecc620f2e780ab6ba723b1bae704364e069dc0e439ba80e8c47df6f4097afa7d05c720d8395d17451e7609e4c391984576fc7a957e0680debc2275010a2e0a521f901beba45a3ecf15d43d1f7b963a7b95041a353c70cd107154e60b4d7845ccd9c15ee02d1fdf12966d2d7bc0912a080d9402609253489361668cbf2ed1753634436f987f6e5d95947a9f9257d1d17f86969bcb4269619be68b3c218ee09ed062756d4922df3dbf394e0ada7c973e113037d92cec97a153d5176af7ae4849cdf3dced82c2039d64029a4815d36eda44e1cb0b74b93971dbaa2a14e128e5fd1fc5a4d423a848132a1d57a3e1f1e613a166b9e3b2c9a4ec7b1b266b808ca19c68afa2d5499cc7ef6c4178468e7a1f8d372ba326f7fe3b207fe9f97beedd541472313afce2c2d8e497207d2ae71b776db4e20e5d09d54f2b31aecafd7028eae0d99ba690ae54fcd3ebc849b89a76ff99f1eca984f153c1350a9a2fc164997bee4c4f0466fbc3b435a93e5785e3d22dfaaab32e84604195b3f3fd36db
38b3d6282bf66f94e51d4bd7f977965b43da477b46a4bc011731f6468892acd04bc70c73c919af3412b8de35eaa220847a82466156f584392d1c04bbe384b6893188b1b20f08e15772818e4e90a0c88b69e70bbfc6a09f9bdeebae5ca17da295d08825d147803b80f1ac60406f5186055f2d6d22a7edc1ad5fb372ad91da3bb155a7e27d21bcb9bff3e0ba6b7ab4de3aff1ebeaa2764d1a6753a76e2135cda21581fd4293eceb9736ed7a8e80a2ea2e49ee2455e54e5d51eb0636fc690c9757b03dab6719ed61c2afaacbfdb9d65e34a03c6e19cb21ad7fa7845deb7603f00b144fcc70a86b81d9c7c720f3cdc3e4beab868dfb9f4da0fe890a6549d0a17254b8f5edc9b0071e406bdcd58dad279f71b9eda4c2baf1870f512ebae1a841beca75571e4a98ae6b097a56170b56c5ccfbc5be1f17808cd23d4609385feb73f883bc1f2c8033b184464664ceb274240a23259d2a2b9bfa30b41e9d5ca11b0073d87aa4b5c31f41a00281b3d3943f11c1d44fdb269a567ee2c06d36b1e0c36fa9da7844fcc4d952d4b0ad02ef131e6efe5063dfce5f3bd5985d69c8b921e40922fc5e947688c9e0452647888d7daf4bda4fff4c6ea291af9da3e2ca84974922433940d84df552409ba08b7088699d3f87d577cf9fe57d848c9def4af992ae6b3d163c6b5635a3c54f0219740354b08d8858f3b6479fe2639ef059aa7927f16d5aa3ac80c8d41fb3fc6f2bb799afbcd77421c6706bc817d1edeeddc0cf89a89ad67674489bb25b745b91f5a0fcf89d2ddd4771e551bd686347ba1eb76a67b5f9c2e2455c7d03dc71d242fa814c92f2d0eb56c1defed72c6699cb4b5fbf0ef14791beaf62ffacea02f37e05f25adc38946528151e1b692d6a02b176a0183b1d69dfc590bfa2622bf07123ebfa1a0f8ae677f0829caa1133319667826d4ead4c6ad69169eefaee50c9ea108682ada32cf191d89ace5073f2d3da0214026d57958974375882aa831a9568adbda9ce8b8d6183992c17aa78742f5a709cc29b618741fa9d2ff5f67101491f8bdbfbf0b13efb4fed082855f9acf0e0b756c0e2258a7d2f9ddaf0f9db8bcefad4ddd339f9c9dc9ca392d0d6ad227c04e87562b302bc532f245f71d785b679939c99a8bd886006ce77b2e26167d408f30316538c72fc8d39f1963d56f929e9"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r1, 0xd000943d, &(0x7f000010b280)={0xfffffffffffffffa, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x3, "bd8d7c6d3e5937"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r1, 0xd000943e, &(0x7f000010c280)={0x0, 0x0, "c847166acb619ee461109d547981cdfb0068ea0f0b6fd669d0546631ff205fec847f9b0dd5594c154a5eed85a1a4db2ac964343dce8936518d11bafd54c030beec2f43b8067b1b32fbc906f60106cae0dc16edb8b98fb80fa896992035c5226a5814efe3b6d14b78f6f1e3aaac0e8815cddfeeca864a1022b1a4a743f126e6968ee3b3d7bf3a978f667f01fabb59def7a90807be224ae672118ddac64e5a2c4e250df78297f4713f9ae193555875e0dd0d85a2a56f47fca4c34bb14c8ab2745dc817935d10644d78a7713b84cb854f4db344ffe684a26b1933bf5711770c2f7322d99948757c926cbd30e1b44cee6305d4a1fb12f8e405d15e4297c452aca924", 
"e45376549955f79ef779bae8f42f3464345c5497f1a7d72804e21a9ce0fcb998babbf55421d2b3e79f5ef64167ca03c3c6886e4e0e28573dcf43e2fa50ff681f2b588040a559c0e5b26f82ed3a0e5f0968741a2a8ace819dd2fa332202915195104b4f2bb7fd3ca6ea77578cc28c9ced5156446ec85174c195a99d84077b1c4d43865ac16c0598f176fd2ca85e70db6f4c652816ee8d19d988d2337177cbf43848192a3bd86f469df76db23ee053c8a72624b36e594c963b3131c9d0a38a4dc961be6bfb3f49c33974246bbe5f5698122926e97c20010eb0527057e37e2d802172e9d031f11f62d6bf454a01e5821e260a0beb924bda9fbfccf36a8273a4bc79830c11397ef66b7bb3b978b8136c2bc526b15cef6a8716005cd1064d45948b106cab4585cb6a0645d6ba859f123fd59aa502998731267b646230b183e12091d738b9a36705d8fa44ed6a1567963227c258c4bf9580d3a5fc6cb5581eb68124c9d5c913ff8b9a6cd51754c82e236a7c60a18021b8fb0c646da50ad744104128a9c3aaa259b9263ffa623985c84e6caf591cccbdac4e7dbf9546372deb948eb49475c8815b12b89410db8f9c134d8eb9acf2d8d139e36e0866d2bd8f1c96f543ac5df7702b644b3041e740badf9d82791750abce8dace3f1062a9a0a014c43096fd163bd78e2a003096cac4e4f8c87353e43bccf43437ef928251a719827f279a0bdfe7c2a2d2d6ef41841d44790f94761e8cb09b6fdc9a8b705652ed6812aff88d68afed496dcc29acce1215400899472950f165eb88bc118bce3e9f9c2ecd91e0f2295064562082fb15193b658821ba3d01cbe75c64870bf9799143a4f93790c15cf1859ea78729c76269e4d4debe659262a6104a663c370b831f1a82bdc02371c97d4438baff303cc9c4c3e0f1404ee3231f9b18210640f3d4434d5c7b78bdbb29c0c62060f627105c82467f9f3981c0990f20b2a9355670b0fb19ea59e24caf8410389372015f3de2a4a51a832e39402a266387dd90cc5eb85c5c15ab9cfbc9dab6d3b9245d567045743697228b255d4fa7aac06fc798bdba01dc61b81bf7d586df7a8b28a1cf196d75852ecf0999fc5706dd677c4689f26e21277d1ddc3a81f53c648ffc0031cdc70248e2c193c29afbf071fd6a58d150a23c49f8281ab065b6640087d1a61815d07e5ce7be012e329fbb4e1e1e422d5231bd4065b14fb9fb0e1563c6b31c323790f5227a67a589fbd72be83e00178ce6e7c73e3e1d9fc53056baf2d6fada8992a3dd003e80177a83ab61f066a061456337d202d5ea0eb3da8d598ff897d54517953accb08ee7ae3a071328d36ab0bce275de7dbe91ebd6de61c8f7ea696696cbbd33ed50822ffc64b19faff969b8ebafeb56e5b93d1dca63fa5fc1f5fbcd36eff714d132d0a13ec885a57bbb851da1c9f3142d8f727bc271e56558d649a0c551a77b36b55d52eb1552aa7db17bbf0eeae26f3ef472f9cf797359cd221a80550e0e0c1b0fa4af51354eee1732cf1c72db7e77270d135890e8d6c38e9bfd9aef9c259725127b7bfeec22ba984877743869ccf9f0cc1f580d0e2a626e10e4fd83a17f8626400a276e46ab997540613e9c889a7e5d351a6032f2abc47545fa39b44af8fd1746d764b91621cc967b831c5657d133792f0c11f86cad0bc64697236c5a7c63478af7a44bb5a664a5f47f93deceb8e8dfc5e82fa8fb3b8cb1c05a84882f5c5aa35c6cf78e397472136e26907229dac7b69b10b591b5d1f5d85af8a9f5a574af37e6f97c00fd739567e2fc3cb10b2344614cbb9e6c68895bc5dcb1bc3dfe5c3993ccc9043c5b710618021ba011764cc16590a799f26f359b3dffc2caece18f9779c222ac49e99fd6d19ab2d6df1d5a5a11c69ca0792bec22a73b80259cc84fa50e598838b5e0807193ee63965899ae0208dd3e634185cbad45c0ea368d95ef4218cfc5a9545404f7c9ea576effe1d29703442c34b953830029715092fc639ad3512ed8ed0b2ee31dc1dfaecb9a668742f53f5187ca7f7a32547811ab3236c62e2b27303a549827eb19d488b485ae07bcd908cab1af49db0a3b6910b1db6fb3db53476b722b802713bab1d7e76b6ff69a98c0fc2227d4e467591e38b03315f6eb0646b28b394d17c26c78cc3e0d2d87778521f5bd568e57038dffc841545645fff4f03459d8638bb22f209d7eaea3dcd9f835a89fd2ae2468f08168a26bdeba2a11f7b093feb40ed584a7b8ae7ac8487540a6dfed00343951ccdd67e9b6d6222b9cf277db68fed167ee2c4921ff8fdc6d2480c55aaa9b45e957ed8f28e80014d4c756201988a420496eb17ff9117ff51e369bb99522c7bdd229d46abc6cfdeded96a7f37d6a08419a32f89dd37e6adac1e0747779c5d1f261b9bf7a8e67bffdac5560ab5073fd3a127acbfc5a14ff20c6fa3a7f3d388f0a860bfed21c65f5243e0cd32028f84ae3505a9c97ad3a75de11107da7ae64d7449c59b4eb9c22646d0dd17025b538599
18ae2dc2a9f080459701a1258baf7d648ee08f3ac38888990df027cd64172fce30cf80d9397001d4f15b5ed2fed5503f03d7289b5159e0afe8036553d26d9472047d71711eacc46a670c532ad5259d0a9d7307d41ae53ef77806c3a22d0ea73d99144c90a18f4ea1a93684890e433f24aa7449fe887204bc97029ed8501b62b916deb638da0af34c18a756ee02288be3f6e39218c6ceaa1077ee05937f37eb29e8f6cdcbb49c9b7480fb433486ca9562ae8c5c32e2a9bbe1fe92b68ad2d5a565386401d55c32805c1f68983d3ecd64f9a7fd3650289eff4314986cfe3c3914d86d470635dd1fe389c27823917d98c8b5b1cd1ab3f9400d5493f3ff95aa2cd57e5066bf87075f2b9a7623d3307f034b28bf60304876cd5de2274974a4cbd431fc152c291a821d6803679d7cd3e758b5e071e6608bf6f06c3f8925d215f5b7d7c17aaef5a612085849597512277650bbdbe79660fc744f0c331bcb777bdd69f719316500cab4ecbeb66fcb251f7795f318767911d1a6e7f3ada0f93ac927947cfa479d7d20e972b0d15b1a5d28d67afea9491ef5415f98f696ee3723602eed68793b405f1dff73523165825b0464837b8146b5cc4b6621ffbd2d5023c1eb16b8abe09151ee51daa782b3469f0c357b8a04fab73df464dc34cea895cb9ad5c9106890931435e2409e51277d54055069b4f1fa0b8db1fbacfec2622cf85a681d1de159e056f7eae0eeba739a3b7732084cfb99ec5d686d835f0560cd2d8838bed6ec4f6c1ff5e9c5c06a52caf1687753c5da5cd4194ae1e26fabe435cf7f626c07eae51bb0db5f9d4d4e5191004cfc422b208716fdaf69384f282770786d7bfbead48915580aedf495a434bcf1022b5bcd86fac09fc3523c86cba697988936a76423e7f7524b41a7bbe409fecb389f77d47fd26f38cf7ac92298d5c06e30956aed0226bb8f13ee23025da24161f886cbf806ec431479023e6acb37282589120bc1159593466edab5c2a8f4b2d931042a3b621f2a90d96155963e85521fef279835a1e2b0ef18ff57294c75ff84cec261ac064c23acda048d51163f2d10d33d3b929de8c7d6414e97217fbea6617f25fd80cb6eb51239a0a6c1ec4c2a250d4cd76d85609bc2ba9beee07e6ef80a6c569cb87ee986bc4794e909d403bfc82ea18c2b3221620af66ac56b312407c64d182b22b8b8ffcea91c6df2e1425027ab79d1c07c87479d050e94d3c54cfba1f770c723a934b95dcfd84bd7bceda961bcd75d58e3b223db22e0ba40ed68236a80e2ec87d6094882493bade7450adb14c0ef76394f2ec8164bd7d2137b0ee7e8e4805046d49987b80463fb571d281abd3f627de5296bed0dc966c0f5da965d489c1f0624378ea764ed56cd6363a12f7c00c6812582009c4a794c1127f159e37af66a26417fbfd8f14eb0f5ed1783995fb2bb2bdae846ac40ac4e56beb6989b317557b9bcd953b5123c624d544fcfe1674c95dda8605dc0a4641347297433eb365b89d7db673c9e902696cfbc88143992e03566054812febb9f87323a315157c41997557d263daf1406a36988ce5e1c622e6b350d7c990ff7b8890a27657c7dc859b3c5cd5a69e4b00b896d9a33927983d9b547b3731554ae479598b52a87df11d9109d38b4b8cb6ebc045066242238a3cdedead76df1b924e2e689fc38d70b2810031b96c1652c777f8326fb3bec865564df6a25cfe9f192a5f8f89f8bda191de99a3003e0ea0bb06006ed8c9c712fe77f4483290b2ee1f2a59511b26c626c681b789a01f7f2396bc4436eaafcb2c7130f615729673a4fecdf72b3529059e592e57eb431c72c5b2543eab821f6dfbd24448018ce526c9924edf225f54901a9126c77d140c88aed9a05b9aa86aec753db5ef3be78be172bb68f7ebf1c328798075541e26c526cbed9bf2cd848795e622b02542af266ed13f0639dc4e84674c09b872beb017b31ac48975e4af7748fa18a1531214a2df62b9e4bf969ec082c9de5da756dc59df9d29c42309f6db8e9500db2653ccf5917c2c2d8826f828a2ce986bbf80fa38f6dbf63404fd96a3e89feb7a1e2c3981d090231851457ec47368e7dc52b03145a8947c16af1d0b58c343bd6f446f213b604c52d911b46daf4bfdcafca6bcc5a3354b12a2d537c57bc48762d5791b2e930bf0984439757eab8e49aab9460a72cc2eef25350dacad07baa1724f46364de9cb53a248c03e093b854275a434aa4505dfad1877aba558896787eaad9ed9c544165d567e617e19ce09241494684343d3897097a67846bd7a0cbe74bc3801ccbb3feac6a2716eb8ee11490d782bb1103451b005053b0f214b828db0d3165d44c5e3cd6105cdf87bf2a5fc6fc14ec0d8886cc8651351bb6f20042be7d1b407bb6d89bb57123756bd0bf10ad69a508e3620d082cc865420a3de1a78e17f1f1058b3c9f6478ec4b773fe6bb51586a67af1e0102b90ebbf3ae8a7fc1ba67330dde7729af378f5602a6fa3c6c4514e7927ea7cb817
28231e2826bfd2d63aa9db542394bec789311b650b52ec87087307328734e5c3789086a9392d73e39fe8bff857616b480984c487955aa95c700bf246744c8a7669a2a798f7c05e73ba69ec774b07e3fec761d8c6dc2eb03e0992273ef94477ad19aaa54c1de4ea7a650aa9dad9c788784e9ed1ad7fc30855dccc978b1e4f70e24ef55e038e16e9f39d4589b8a6470df3fed4378a63b15623b488402a69cd64cd6dcca076636867e3b24f0d265cba0b28215cff437e97a984913bffdadcd239675749efcbee42f223bb0becffb494f21f98f662a2a50b2a1421543ddf281bde8e8fcd73742ebcf884666898bb1dd142da1040824dfb7b61ca694dda1d37c70fc4b7abff5705fe38d7af8792361d8"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f000010d280)={0x0, 0x0, "8e3e2c7f126af40c467d518ebc9d5f57dac865bbff298f13e0005d75eb891dfe068ae66cbc47b2371d488ed5f8b67154843ce19606116a22a705306a1aca9ae3290a6231382bd55c1f5450ff1431013f79c2e8533ab151ca6f8a6bdbd4c3b7eb53a648aa52302e564e993133b1f33573f03595fa7d1ffdebe85e4167baa799013db4940c14352157e701acb3bcf3aba346c9c8c781a924109d63a92dea9a3bccd7b561e30d93dcc961cccf009c8b898ba0f9bffabe6cd0d341654f724c40583ee3788dae747b172951bd94f844dc5ff66d4b95855a52961438a99a7ed61fbf16e0f9759ac2d55857ab18256d6ff02ac62f6f91f71f015c856fe1ec56aa8d0ede", "fda610a93894710b843c035a2a08498f84ea7919120e49365f49b04cead69b82ccee0c08063d8f38edccee30173b72d0fa534ec65a039fbb1006da38dd51b29f8b33bb54a46e947dc37bd11a4f11f17bea61ac5349ab7c8bd9077fc59316b7fce7df3937c8e0230b03271d3d0d96e86e486a7697731ff600cc5e2a83b178d7713b994593ae6a274bb88446145bec40952d2e8725b59e62d2465e11b58af1ed6f19721f56c08825e7931b047237fd5d122ead8545c1c06ff0bf557cc46af94033945259f6344321a43cb5c4e6945445b3d6337722b4009e9371c34c42967deb86b9752cb862b4eb2bb04ecb57a8907843895ac083267b418d9a0b6baf0ff551ab41b5eefca6d590da7cbcd5b85f90e7f73b4183032af67d59075b734645f9918ff50e762bf6273bea3b686e8f04a1f34217aa8fe0847639dab2acc15e90fbc8edc87476b09e1a91e30615a73f45743766bd2fe013264b79e436a666f2a98d3e718d5456b89ac6fc0b487767d69f5c7b73fb106e4d17c83e99f78d441b44977c542b8f1caeb4bbadcbf882cf7e07ed5a6e774d04673bb39a9165831e293e664f72cd5904e44409ee61e77860502e4dda41a9cac3e9c64bf300bb57d3efc5de4507655edbdc935bf2a3a1b38bf0b4a08f3ab0b35045df9384885ee44ec62348e951c70b55ce0ab6ac5150646eef885f45b308629a3d55c65e19154a8a982e5df25c511d3789e4f24b42292695f74f09b54b69ae85c23d0b250326bc14149bd81e966bb759ac55e88b769ee3ba69b4908e16ea48a58a639fc1b9c15a278470fd178e05ec02e78b9c9ca749a3d7888dff8cbf0ccad1fc1f8006e78e93248685fa9a350cced819a79756fe203a0191faf21cc35b066e68c6e3fc5c55f8c351ac3bdf48620819748ba7002b65e5be5c57b76876a7795676b15376bed1d2bbacd0ba8de6a233a1a8d5cf315e0d369e6d3b30e139d37aad4d8fc095c769663df367e649c120e3018ccaea57f728d995e185ad4d8d72dbf54efb895bbe01cc363d688c12cbd3f547ffaf06de3c0da5f9aab364dd17804f310b6c6c89f3af611af9e076ce5e2527b95bc67aadf2351abdc2958b70200ad662f84a54e30a97a6ea00f18f3c499ec3a4ba055eb061b8eba3a969e18cf4cae2d90eadf37bfc39ea2651e7a017488c1600073f4fcb72088db1cf15e941a0e8c56f2963cc2937b3b552e77edfabce75fbc677d1b05a82a4634c83176df1d6768fd1021025f05e599480468b8b8212d1d131659001cd37c4c388c7f2a07ef6f2f2e89eeac581b3d827434e571fa431ea1e2af49143377dd37912012108204b158346c7f34fb36705742847fb79a16e8d2a3247138cccdd3fd03cdd50e35945d28e31195509834e856df5d9780102781d8e8aa90aa8555340d04b3e2bc2fb8a18d75c82b34a9843bf2cce381002d6ab70843375155ace706b0fb24846bab77829e9117fa6ba7b6949080a82a1da93d57ee6b2f6543cf785be23969fa180caedfdef08844d89e70f4606b2c7f2523354001fb0e3c45944cb61a8d9aeb02c531fab275456cd9e68e1bc9225e02d074592be204f541bae0e535d48aca2f1332a147dfbbd0482f6b3813fb977e85b35035d8dd03c26ea9c9b8f4214fe34d71aa26c5a765b950452bb8f92eb48f8c50a2e83e3f5da743a876ccb57b0afd14ee4dfb1fbbb1a4a74972ac511
f7ff97fbbc3d2d19ed9dee5e4a4dd4a10605233a173dfc75fc92f92b0fe821d50222e9182f2cbc4eeb02bdc05535e83344c94c91e078a9f273053cfc81662726fc7d1cf8b8a89ebdba803251e4a8a5795d9a0969f13baa251dd2c336aa2237d7a8031a2f907cd85c2b75dc6afdb2a342dee98f30ee6806b17435ae80affaf851f460ff2bb34b9ee244eeb88a2cc5f5b440b1652ccf69b4a3312a1d504d4cf42d7ba1ba247e8b794665821ad17b909dfc7954ba6c9d285069211899a8ffb9a6f948489d322e7dc8d822b5c26333586c6de262e0c9e82acd3f67deea196698d4796865e5d4ee622f835b7e9be5d0c4c41c44bb5de107aa1749521e3d4064be2080aa972eb0f9b354acc133ebfbcf1acf285d7b1604ed92b4bdebefad134fa9a668224d90afbc2a0ecea66123fbf947ade45dca148997251fae3789ed8242090d107792216b351163c71d68967f33ff39a038fd9f241803bd2103827481054cad19a2a495830a60c9d45865e55002876108148654fd89d64e668d45cae5ed4c371037b86291326785943b1d3f56d5033bc89d328bc2273d64753fc6a102236f85218278c168037bd939d7a73f771fbeb804e0c4e875dcf7b56258e528d44ce5daa606fcf5940073a7361c176cd69f8477c990e7e91dfd1b7ab7e0a6034c4a429f731faa5433fd3c2bedf04aa2435e32fa7c53171512280e0e56d8b14c394eb280564124e865279a693799a73aeb4fb81c692d4c8961db0e305069c5b6e2af505988a045d64c084a2085f911da49aedeebbb665724aa51298485293c6675b788da992c56901fb81b081488abc475ef0e60bdc049e1064be70d1fe267dd2eaaf91031a0d0f85b62451caabef2e1bfced9edb41fc5deadc007d5c6675290f538245d642afd9f8329452439694a06201ad6807cda8449430adc59f99a781ac1bdfd2a3746b92b436ac5fb1ef6721886aac8e9e3bb8ce1b3a3aea1f70b9bdee99a57966108faf378f2b453e0d8344f2686f028b890962ce4fc06241b6bca981cd097d66c298f139d699bfdbfa698302f1240f6df2484d6249cb1df5c170d9bf090535f751e0e3c949ea15574d8f282c8caecaefaabfb91868e77048a50334f39a4f6b264b1cb7fb7dfa3e752052c29e93a1bbd3cea810fbe76d6898763e32b40e2835854214b2c130d81ade004c52137397ba41e548a3898cacf655b33c6eb2934108f0a55eefadbbaa093ce52ce62ac4879216dacb7d4e5ee1702079a6c06306875411001b0759bd7cfd57c278b4d893de608bb2f3f8736a948f35bc357cd482927e6f80ffcef7f556da4766231f84ec0f69e6235e5dd434628e7fea06bf31dd258e28c875110ec136d82fa5cf1ebe071fc5faae75df77c4e401be026a3193d72be005f70f2275e4b829b6039ab216e200c60739fb5513adacd113d86489b549f97617958e5ecd99f76eab7045785baaf48583f3a0684198b5304ad5901c7ceada492b4f6a19091f935ae21a2bd09eebd1cbf434c9be97cd25a5bc7f80978742bd98edce63eb40cfba2c379d206278a73dcffac6c375d54211cd7859151fe6fe9bad1e6423df05f323307fe026db13be15079bd57de50d5e9eaf0fd9408b011fda201c2b38304dc300e9cfc1975b6b50f5431a867265bc7c55c55ffb88d000411177f3511bf5e4051b25d7c0687cdbd578ae5a641f0023b66628c86de3f60a247d1283f322e40408963d3a5f1812ac2f6c00c025266fed1f4e934ea4cede62305ce6089a886e021c1737a852b2cc33413f33adbe3ba36313aa57c36d8536cde6b4a43a62dcaf8bcffd5eb07af9ece451c897a1d0539989087416df652e241201145a9613a2e7f76cede34dce2a9274dbfd55d224a23858c971d5019eb3bdada859488e9d25f385218d019016029388ab421b9b261bc6351bedbffdb876df6b8ded2521dbba8cbfbf67690b0771419c95b98b57de1d3c37ed736be573251a6563194b52a88d7952f79d03df62abef1eb2cbbdeec44c2427bb3b995ffe045b5993868a952f393396848723c5ac4dac43980b635d4c04a582810b70d9043f2b101cde6943b953ccbbddcf931df5441eca91c693fd4f607142e578c6b6e12f7b2933838d00eb2c702d5a0ca655b759a76230a0e8a90c930bc8a6ab5ce3dddf975ed5cfaeb4ad7d3398b9ede1cde68d0a12dd50aac798b0b0e3fe5e6d48c73a3bba09eff15026e1626acfe0a3bcae73dc3b21c71ef90be32aaceb7e9bf5a7ffce69a0cbf35cd157281d10f337e89c40da9fae6e607279e249a1320290e9f49fb7dd822169b11984e9602c7a99203320c27a60036128d243fc1574ffa24eab13ee3c76dee4dac1e7265bdb4855860143b406472f1d701c671546889d49836ffa2321dc5bb97c52af9cf3d1d7633670508d028763815158a51e75a8520f7ceabc34aaef3519c7811b0a98a5b17b510fdf42e935146a3ec19e511b5a8ccf7c5e70552e94b1d2cf257fbe4443df38984e3c9acc2e76ae037340
d8ec731f6c91ee9759f883a89439c5027700e3cc832d5c5aab6cc7741a12d3ada0f9fed2b65fa1dc873e9fefd49dfa7cc4b97a3b98cc60d9d66b261ab0167696ce71b9532f588b7cd61c8817dfa98179e536c41316feb864330fa8e479b19874ba4bc768b6957dd31eb15442a8c35f297e19ae90b30bd3e17419a629d33a14b31de1e44359fe4e5742c39ebaa974dbba3b68f3683d60693d98ca29dca1198cfa3fefddb049f439bee15c6f8fbdaaa61ffc46c38a3d6f960382de63e7e6ef9828d4a8d181c52bbd4fcc0aefbc855d0be554dddd116d06fe2c58da546a9ce7e4cd52f81c1196ee762a44b5f2707712cc3af148a86e5c32184d9f07fa42a0a70b59a7697fdd2f369ca9963e42a91bdd4cb6d76d73a6e508ef8aeaf1f3f6f5e04e6a85080c54a5875c876ab83b30c7d36690698eb991f0ab93c0a718f523dd240e7bbfba2240e279fbdc0309feb31b9b42f6c4288e5fa8e6a222ef16bd60b0dc4c8d329be220f6f4ce4fb308cf5799173edc1d6429b9369d1277f5fc01ff1a3184000d6c781be98a461f9b8df9977a0a1b771dddd2880ff8622a92649aadb3b567f65716d79f5de56027a749962d398260a51740e30598e645145e9fcac002379cefb13e49546431ff5dd1050fcda90064718901237aa1f84c7fca3c1e50e557a107fad9cd8f50082aec58dfb949355ecf0b192e307197a8323a209be51fe08379e18179e5f1f1cc2c4b6f6b63b3cef346985d7940e313af9729ac1d91c3fa64726b3c8fbf7313cfc52435ca371dbe76fb59033ab977b5a566af609fdca57f9961b7bbc40b891e5f2be8ef3bcad7e302a3bcbff3896ad930a696a821809cc69edc1bd33747faf1977e0c1b72bf51aa75eeb38e2658754e534f79bc8a7ce96a6cc4efd61c46e008e879a9b685ebe68d14ae58b9879800e84a641c8ac1a8a9bdf090e9e8c72101d08b6548371a8c7a57352f5d3d2b29fe9e16499327402e0fdec2071ae0eeb999ab06c953c5a2d2639351510b095fcf0c6048137377872ed307bdb382636d547d68f05f8f4358e6c7c5b101dc60fe320aefb945c3b672ac6a52b2490efae6de8a65d1672396902b92ba86a3763844f4235197cfaf29d3535a93671c2b413f0b060ec0ff53249bda17210649d73919912c8a11ec2dfe8a34c3f593345e411f4c50663"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f000010e280)={0x0}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r1, 0xd000943e, &(0x7f000010e480)={0x0, 0x0, "0246b0c8dceef1c393bcb83a4e14dd617f2b7dc0b1cdeb830b690667e3512e43fd2e77d83d0555df9e7cd1eac3dd2092008c2e912ce0ea529d9579a138101a1748d6048f247cfad501481d6e57ecddbfe87bae30253108aa44dfcdef3f8e74ed2842fd9934fb91b14b93200ef227a4ffcc2a8e9da6d3294cbf47c5b3dfd145803bf0084c6ec343f804d1602de6adb29048030c38a226d45c046134f48b148dcc3de5d05173187e0aec7c0396c08804716e840612225541567ea9ce2914de3d2af6a55708cd9c14cd160768441277414cc247d4bbdc9d519b0314a7638fff57bba8769d5ce3840f3b5040fc6032d6f09e4255eee022e8380bc48416c369dde48f", 
"86157c71a2653429370e42c8958b242cef06e1d09a445724a7289f3b1625e0b253ee1f66d0d3dafeef978c77cff305cc17eb40c6a0df1ff92ecf2d9bbc4fef8ad7b1aa34692e229360c3cb199819c7701f0d2cad185db7c27d84ceeccc58253de5a23159d1c5f538c3cf4a99ed3d0538ca0d7892813d64d8cd64f98748899d2d6166cd9ae01b20587523ce10119dd5e6532eba1de38a26b0ace843cf7006e5e38d80df14b66a2a999496a1c625447c6cba48a9a6d8c13d779048b30d80f38c9fe26a53c01160f0655890976482f7c957d5f7856357bd05c7956477093ca1caba4d39b1da80e4fdb918cc06d2147268176abdae3e551880e1d5d67a92f60e6e751fd4f7ed8bf6f302c0c98ee9fc9db419a12583fa3c11dbb49340d91ad5d91a4e96488d8ef6712743fe251513a00ec185468ac59a8abb95fde694aa25b2a7a3bccbb02383b5980ec9802c6261f138deb810954e87869941c7594e693b1cdfc9904d0efff040d415f21091622f6cf8d5e5efb28eb5653bcc5d1b14fe8e961bb5dd75cf635830fd0e4e9caa858b87d5b72f59425120377594c291863de558e8a2650ad029901e2ba7950799c603b8ecb51508be88895895371ad9220a8fee684216a1b5da5bf26ce24c69b8edb3d9ab812a45b5939f805926a34429d6179f72ff15c5983a7d67291d32ae431607513fae025c740e0edee219f1442d5ad20ffa43b98b823bb82bd11c17a15ddbec007a14809774154e6c34c41c169f07b8e0a2f5bde73e3a511043791fc60b8126dd1dadd62b339929910a198ad3fd7dd1bb17ac1d68ca9901ebcab7f62fbd844967089584deb4e82e483aafd2325e11f16f54e565ce8d06f0edbbc378d59b16fec7400bb9962ede9a3498f8bd1aafcd79554caa29d40f195e88403abcf5a78cce819be8ccc8b595043750f63c1eca4542db765f8ee00477ba4e5a08f5855c9fd6ff2059c172367d41eed5ed241ad686925bde2a9ab83a185a4f4ecefaea23bbf8ccf3a00c241a485b985645afb0623f353c77d8e391135074bba0b29cc61339e8b57558534480b51d088ed9ca1bf213870d37223b833c89c478087cae87e6f37f178c66fd023428147a6849ac5f556b5bdf8d19f7ebeb3526ae5c991cadad0389b0a9f9cab46ca1920e160b0f843d14bec7a7d7fb0fdc0d1a0d8fc43156d9d09243232e16db1609ef196bac8ccdaa4f7a57d57db458d6a273253b3c6f9cc0480d8f236e878a51874201c96f57b8afb6ecd8e18fa10124bdd40c5cdbf7e330c26e8716d1f0b39aa9a4191e3e21509fcd3f752557b37cec394b9c46197769802b377ab92f70242e8cfe548f216144ee98bbb5c8b0a6cf7126be1504c8feb67c06f28a31c4194d47f07d961bacea0fe8b1521481d115dc87727959a7a4e25936eec60af4a63226e23388e95062432204f992d057da63246d10d449af23e163e1f5cc73c24d2bf90cc6e25aaf2355d09275e1e759a207b3dd901a815cb4b2644c09440661b75fad7f8bb680d5ffde4683c5a3fcb0bebf63eeea09bb2c49ba539a4566fd75aa34701debecfc2780095eb1acdf8c86ddb99917cf35c135824eeaa89750f56c5615296e240ea8e958530ea37928b084e155c5c4a298867be935623ad5930986b7bad59a4d4fdf25e6aef561017e85343439bb4d9b946fd0156eded4e834470ba93668651b81854b184cd57b729b8b601efdfd2a1e79b91708f7fd5bed5ffbc18246d5ae0b5b560ec1e3ad84f65b3e355b982866a578bdd1d7a1919cb633edfc4e9acb718ba9c94fe4c3ad9f78ca3d6a8386379d9c82e2f146e89fc2dbf9b22a184d99a1affea936d43eb8e97451aa9d5d06661166153129aa0d788c8a42d58866c621fb392870c70b6514a38245bcdf444873fdf347563271c6f27f6b7a195503033f190cf29795f664ac6721529e20b02d71fae263f578d508ef5231c46bf284e0eec12c499e0ae2e1a4550476de0f6341015f95d36a41060501122d29d1a94e95c26757b1af928b6d7377f89d3ae182c697e2b45bdea3573d88c5f345a2b44a104fc53cb5ff1b5de2192e4e400ac5c0020c0ad940bdc2bbb50c1d8901bff9e7a94e856d2043286ec0a7b347c5418a63836a928317b73205a51370f27d7210250f32d20abf97fcf227405fdb2c5945b0cb8605bd28f0e3c64d44d9d6f3836cdaa7b5c49cf01203538bd6387d91c162d0e8bfc060c40272348abbbedcff836eb7aa4d568ffd31dd13baf62ade9ec4a69d424e937592d015aa659c036a8d3962126b2ba685dc97b97593caec113cd1f4b9277050ed55a2bb765946a2349515eb4caa3938dc3d1318cda69b6533191e0efa37bd7edcada552bc472dad9a0ad8956e10c08c1515a86a204c88c6b62ada9068b184ac7012ddf84cdf07b8def613e516f2e482ceb1825eadb6ef34a580cc7f6de3271ffafdeb816a478ab07f43ac61daea713118812a1515c95886463985a04fad6ef11ef49c2c5ed1fb7d47504bf945381e
b8e1623d9ee1fa0715ee78b1a74ae15cce667990ac6ad1fc29932237037c1830a1864274082f9cc8b6746153a269d8d2d43d55081afb51e6c498795243a9a696cb1cb7d3c455e20fb11a1a34de07e010f2cf4dce097ea556ad5ecc9b124069b4d93d3fe2b4d9570414cda8f7bce94908c72968cedea11555a5e5c366ea4153c91f4e978b76f92e43bbf3de3372f1b2bca04e0d66bd26463226a709a184d51c33f6a74d75b1b8d1ebb0f256ea1ddee10db4d36c471a3013ccf441cf80dd93a7ca909e07a7d35df847290b7c16c31287ecc6e5c32a58679c0820f91172505f5ca554689807ce630e9d8e275d520fa2dc49060482c3301a16dc86b017e2d4361114c5a3cd862899d149f31a8231cdc0dc1bd313bf0695dc69bffa5b912cfcb89aa699846695ed8e83004a699ee7f705882209e143042fbd3cba51b8335b3b4bb85e3ff4807d77dba2052d8ad904b5c7143919e3d4e9d86a6852012f8d2b5aed39b21ab26649c131bf0431d9b7124bdec76e71012d22a9011d30da085114d1649e0da5b83c6b2269784ed337a72637d301132ae45109c001b4ed1fba774dea1c3e094b9e7feb79781c7111c56caf3fc723afa88977631f1654d4553d2cb8fc880a8893906b26bdadfbffde3013c4390504f7b7e541c21e2da39b7bbe2068636e4cbfc904f8dcf8fe03013c39171f6d881dda81020eebc41591bb3799fff3c11f63685f1cadede37da3dd92b18aa84b85babd84d880c5d307a18a27e92d6747c0a45ee984961eb34bcb9e647a271099e0e0b9a1976fe8ff3519347f8e15ffc2e277b93a8a0b5919f916c3008a3dd4aae8baaa7c3491addc48d6204c7ed6d249eee5cc1a7c2073cbd2717e4179e034753b4a0818944213b4922de9985411ba8e630e69f5b376365246cad4a53c5bf37eaa36724bfac15667734f3de64d89ebcfdd69cc30e1b7165ea21065073aef5654ca2d31d9996c896a1fcec9cfde607ef0a87bfcede76c2b3c771a69a2fd34e9a14691d2b2fb5f49c8d303a1e5a196fd058ccf24464f71b7bc4c715e8e59f29259152bec0dd76ab6023e3129a945da9bf489bb1481cf77bfa4950041ae8d831cd2a32d183ccb64c71df74a971e6231673cf3504b5c8c7f5422cc4ecb95a400f179cfad6b43adafee865054eb9775ed7870f6853421399e3d4b0dd2a85bdde124a29f2b7dbebd4e3c7330d69d73176d110abcfc7f4e49bab946ad91c93aceafa4922509b8e0da82b64f34926cd9d2a6ddec0fe1e48c7f2930a238937852363c09c01785184a39b8a6f48f001c8cd897973f37087667cc7838310a787dc8c0513fb2e41683af04cb9080c7f51295946549448d7c47e340e32dba2923ffa386f1ab5db817edfe1a0a152d5aa7f571f412064d4054368aee6781b34121d7d88cc8ced106a0fa09985832b97139901906a6dcaf971f778a8dc2e42e1fb042ee15f519631739649ef93297d5f96a567eda4580a4dc2bc1a8a8f2217519f728c831266b6d40eb08309d4f9e82c667c98ab50c6360c58f776f3c8aa7ec89529025af955b008ed007362f256353dcc909415bd0a24406078b9a6baa31ba160d684efdc0b897c47a8158ce0f8ec0e838b9b36836f7653784f23d76f936011bd3f7e701e340bdcdc8837bfb672966bc6c12066836feeb37d1c0a35692d19f0b5200a7caac427ffae284aaf8795481072ac4fb9f929b2d98b03100d0779663f246368c75c9db848e9595dc265397c0e712bc53b4407f0e6c210ca074515d5bf82f4c31cde791b084b2a0304c44cff4037f0724627798ab3bd3d0dbc0aeb7d4c9d641cb085cfba6dcc9f810f7162c03fce97f214e94e6c2df14264b0a256f0ef0620d53b3c4e7c636aedebd07f1baf478badcca5f2f2cf73d653157a2550bebf325f6237c4865798bb29d4ed9f28ce61db2fbd6004b89e4bba252d6aeeec9f8531e7797c577a245721e4457fe33c82904ae2bcb7f65fcabe70fd60dd723315a0fb3cd5db45ab0c712a2c8a0ec647f8656076b6dd57954364201eabc1a39b2363964c8f0e78dd89d16b58f7df193060175a8c61814dbb7ff7c7a30ce65b1c00213d16593ab4425c13650900c8386c7cbf55e20093253b6ea667222140556f6d0535f65f28fbd009794846c46591a45ed5c14bb25c96427c8b8fa196ca98476d96c8047c775a22a7cc2bf82d5a7502f7fcadc6bd53051a0892324ac38f325d05627fc8c0612faf19edacc1241562052cab6e5d40166b912281a6c38cc2588a5fc154f9369922d003398376ecaafd40e7c24843efcca58f8fd99023d2a11be21ac9660a111cb1aaea11e20404c0945c4c263a99d52dd451c7e7134c7ec4920a8e1202d765cf1ead23c96f040745c76a6f7c1d93d836917bb76568a12042a935009d8e72c1a02307ae2064a3719e27d8c8f0043a623c08ad1dea5a24ffb27234e1d0edef6c3d8cd737c79fdd2ffddf372a83acc85cecaa3a95d45298e0605b988e29fed4621ec721083b3e9ed1e7
c2fbdb23c6666cd298ea938685428c2d92aba50405cbb85d20be422ab3ca512e6fd067e04317be65a11cfc22cf4bf524c3eb321c50168e19f0cd62f1fb900d28c8be5b869e350414691095dbfe76d8792b47b4af90316a1e9f76996dbba9d8510f47da87b1d73f5e12ad93b0669132e7867a9d853664f4758c5ffd4c6911334413ec21e951681f0a6b2dc8db5456b4a717954b52470fd5017c50b522c4ff1cede0414219cfcf7d58c1de210a2c847e2ca6861991db7fcb9cbcf914728f4d8ecf7b77a6dbcd4bd2c658c1536845920e608ab76e0ae77da6b9b63cf44fdeaf94d2ff04c53cb3aa97628d4795b8b500aaa220e4d815c072a39287a6abc000dca7a6b8540dee90b5a1b9b40a953849d"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f000010f480)={0x0, ""/256, 0x0}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r1, 0xd000943e, &(0x7f000010f680)={0x0, 0x0, "55f73ca241ce0629825b3918ced6dfff47299de22b2dc6a2d8a1d85eb428e00611c02191f527460a87cee928c2e5e9970ba69bfa893eba7fa8e673b4bf3c8098f94e23f973ca07591194e91a221c93ea9850b659c886c26ac3440b9af971630929ed26bf651b165783002a712f658a028c517a471092f7b7d8e4861d5bb3c612c3cc2a4a651ee9aa186410cd5de58946142d3456c5a52a2ccd6ed9894bcab7b1b8e14178c4d6426dd7d26d8042979a60fb11104bccc46720c9060e5b998a9350e4f0c1461212f50ebb8e18c515255d4c1523af9dbcb78885e4bf6639bd2817d82ce0d2623cf4f93ea4fc3b0eefc94d9ed405845e6edae8fd354f3ae6d4378fc9", "30a1c405fb61f6a996c607f70d45d9aa26905f4a35e634989f7e9bff6ea87f91f20bee06f15d29e1341a3afbeceb336b5e36070e431005d21ba4a93ffb67ced38fbadc8bf933caac741ca1db8c6f296ddcfa4656923ecafecd6a636325f3f50f9dc74f1976e60e3d0eb19a5a5b72c0c8e15cea2c0bd52c7b48e52446b4dcc857e45a3f0b5e60b038ec6ca20c8c119893bb5d995f276c37f9264f4d8c3b23a1e379c21017a480829349879ce17cbd0a0e906a93b54682f96e69781723b8430bb3d892af8fcf816223c657535662e22b19b51efe09ad88294bed83428bea614ca709bea3c034fbc46dde1329c37aa870debc5b8a23ea8bf7c876b765f222172f91d46819fdf64bb7ac42100e600a3490de25fb1c2f8a3c3fdfb4f1e2a338b6995e48cc98b7799bdc4b9df6cdf6019b686c89c42644e61e8d21e8a9eb9b84f74bcf466940cab5442a553be0cad4c0cd6ee1918ffcb59f03dc4b927fb7d63183abcbaeaa006fd8ee77e2cca44a63aa4e91d83bcb8df2f117be6df23791a16b1b9451bd08a2f5b3d9be543e2dcfbc525b77c3e02215bfb445d464f828ec3b06da3591068664f371bc56511c43e238248cdafbabff847bdae88b1d8cbbeaa50b69e3ca74ab80eb3847ad6a9dcf021aaf09be52c46b896667b407807f7a1e66192f0c757fd408a9221199f7a8d30c88a01796a26dafe9cf8da2d54eb9a8cada7b5ce9e8a2939df8fbbcf8780a0d52c6ed34287a5e99d039c6fd5a1246cda45f44fbf5d7f037f02aeb195405f5da4f2eb95b66361a00551d9c38021509c50da522e26ecaa97dbe64e8e3599b10b1f789708a7baa3e21513e00057e1129737094a99ee59e835595f81695ac73129a76c41c0a2ed25b0ef436550d86176168a3b90b2adf83e429d27e4f1af9f3882d327e8c9e86f387c417e605b9abdaf8e86dca24212f077dbcad0ef27ef14981990379ab2f48118b4cdcf7323bba24bc7fa6082b1a9b77c2fa19fa61e92699cf27164518cb31c5265fd88cc065c3f550c216b6f67861a2ad0377e8c718ecf3ec2286aa78b2c2c8d319333016590bbf46f65f48ac42fa0f647dc68d4f10351fd5558a4797866a79533648e47af42a62f022300b09f8e8010d0d289fbb2921206a992307417014abf6ef419db31464418837653990050370f5bdc0891961a6e0283289884d92c02a2533a1a8efe0accf5376a41aa19f1c4f13571f488ab7b1c12138d37bc83a7c0b9aa085203456756065f43dae7bb668779ef3c56ed23259240d2f5ea554739769470a6db89cdc3ce19d73c8b775d86ecd5cc88fcc2a300ee7bbc3bd2d979203c09779724d11cfca84c8387034fc57faf6bc8cbcb5e5aa1e87c806e35156bdf2d74350ed0b12205618d161c88161ad1cefcdc9548c657fba01231f7e69126e3b42c5c2c22b2d9c7de41cd95d8d85ad43c94e370ca5ec6daa2702292e83d4863da60ee9f8a176db2dc1dac0fb95b07a50df25d321812c285fc832f23ce015c84bac6c97748f53c2a9a09a89003d364de5947fff9863af4c0c0d1594c0065026a8701fde8d4e2beeaf5f88f30c05e4367b63bec32b9c64e858b6dda63c914ec91a7ae8e391e645e4caf78ec6971876bd116c438acb7039895b6a3bb423f609e2
ec8377f41062a6d44762e91bd48187c23e40e7056e569cf6784d9395fd3942f082adefae068fd1a941385a2c5766cc7d3186162d7188e3be43dc1e3efd8d3f2b295505b0c311c26cc180ef6cc0ee7b3641d70015ce3c250b14de4503df5420e8a0a4b3e5af59451059b3ea90e9a38a4176b8fe73d1b5aa1fe5fe7e1f5208bccdbece162a2f3ee1a79167a7a9325a9d70fe00a128e35a8a9c75fce8f0061460e23f773efd5fe1e6ef1daf8a48a93358766bf9b0e47f3ca840b696c2213378eff915b9adb32faebdba0c530474dd6c484b350cede56cdc5e610d85b5123aa38b4fb0ce9907a7a6908f4a48d90db2468a37cb288711824c0f66053591b2cdd8898b68085c85287e859ecbe3a2a1edb3fc0c52e241387d29c34cb4709473a3211448ed637488ae5e3648c6ccc337af9ed95e4095050ef69047836a63702ea71fa04889cb269b4da6de581fa8d15bec7943a7a2c848cb1e95431c90cfaf4e11156213a089e16ece6b1962edb1520ebb542d8fb6fd6c42b1d98a53f81476a7be81358d020d3270c0e38474a3ad37ef544f5fb6c47b5ad2df14e8002e596d8afa8818904741518e33f67be0b5465eb17267c27dd20d409289c560a0f141ce09b2bba194b830bb730fadd82525d26f20aeb92533c263f0b9581c1b68e10f7d33f7b2f386f144f5e687e2b88c23e745b1a7eb1edc2ed5a2ba88d4cef0577c3a16eec70ed6e337bffca0d60fb8079fbe91f003256f1fe807f4e9d7cb92ace07862a73edaa47c72c0ce87a91772ac3d64a07887588fabac26a02412468ffea37200c9fe68f3d8297896f43c75127bf92907461eddb2b5cd56f10c872f3ac25a5cf452f55b302b1abc592dc56a8b7e7aea84cf99d7a154878e4fc46a6191b080474f47fd45af3da5ab72d28383cb8b5240ce0d8afe6614183aa4f9a3a1c751c1eb347c947a890cc92b1ee4a281aa811ec52d7976bf5967485720ca59028fbffa4a3d06934f632185f2efeccfd6ba53dbd1b7ac3cfd174db7a38d28bacdbd24a4f2c2296a86e402d98e20f839c4adcf88b8af8e9523f186926cd941117e2d890cd84bc508ce5e546cd3029c6fc3ec9fe2c5e349473e7acf941c3f384917c64ecac1f10ae0fd3b28cd9b36a3a691d8e5b05455cfef8a10b06a582dabe50a24f8614f9f4404d7085e45b3ce46b5a291d7d8b124465435331b603afbe303a37a470dabf6dfd0aa46ba11527e709225fb1d65fb4aa84e02ae126c533a83f0111ffc12ee8da6e3e1c504c90f3daefbb2285d73b8f6ee205b854442b3a2031906ed2efb80e8edadd388323bb3874ad8fbc070221dd7adc03b1d5155944ab328be1179316dd742db8960e248b6258be3bbef4111090592332305608dc8a4c97622cececa829038e2caff257f286370ad2f706a5b93471189214b5f5dab6e35e13af3073c13b338a6256b33d821a0d0bcf23373e7bd4bb38785a06c94ff5dc7fd10b42d6eeb789febdbf7aa8b072ced106e3d99beea6fab2f13bf9f1986f353bf0e72e762e44c08c3757c55a651df0f63722e0c3d12f78a62e105dd01ff3ca215a5f90e12ee9fec552a196bdb5cdbd2f2bcaa19059e9cfa053fc92ec1063b563c24d1db95c8a28983f27f20d23bdae39b63bd5c60b36615fe738d722b693450f48ab1cbd07481f4561433afbdd8a8de782b81e75702c706cb608855882cc00f7a12be1acd176fcd09f067370eafc20f1eb43d8c63b53d701a7cdee771987b8fed756b7088605102d7e260ab0c009b274c42ad200f988f1a3748de176024220711758032569010db8b122e158b7b8f11b68bbb11cd6a8c29f7ff8cb38af8ebf9d24cb22ebd25c3de03eaa7265f84eb2c9eb7648fff2598b59c0329ae4d1830e6bace93e18b8014da321415982e397f2d176cddfba01172e12bb4fc0fc28092f08e91fb3ae489d80059c03572b1049cab660245dbd4f638e62acffaad871a9424eab80b74daf7a97cf7416796caf7a9e1e714098230eb98921393a3e978ffe6c0eec05b1facfff76aec07d2d226fab0eaf28562897cf5bad52a8a3094e9b9bcac81f2738e934ee297b0111a9aa7079d52347530231617bfbdcfb702d38a4832b2213db0507af05c2152e081a7f7ae46774215d3df287f4e715e5d27a25fae03b52e3a70a9a0f8863dedbf7b19cbe86efa02d1e5a3502e3dd017a86483aad52eb8d0c804ec7fa4ad67b358e2d56ab083cfb4c18dc337c3b7f701084b34ef034e614e6c862e128f5e7a7f9c29a8d7b189dd152b39ee75d2919efc7d7373449c6e60680a731a0e5449180696e7d43062504fe3c204e3f72f3c6086fa2dca710ecb99bc1a2fb3c74c9524c2217985b5b408a5932751c95f4aa2d890b7e0801dbe4b69f05afc6c667cf45b0790e84a3b570f4a321ccc9931a48a5a4cf1e5d4d2629dc68feb9375781ce10f0043f60cf0c48bae682c0c782d2f48e7d67a40dba0e35dd9760650c6ecb9a142b04fd7d50e652ccd15df991103e1055b95df53ad1
02cfe3b02f3e1a454a974a586920ce57b078d7ce0af21c8ab68fe519d761503468bf7e90dc1ecb2babffb47c07ee072336ce54d6c0d5567a2bd15c8e4913f536de53f71452031162488c58fd962b457358a2a9aa4fc8c96c6a1a996c35eb389ccf9b89a52965e8f7e86ae1db12a92feeed4ad24eb231c40859a7d4118934972cc1c5b229b95f1c208bb0e3f4a2d208c4e5c3a6a5120aa2ed95f013227ebd0c79ee59ac2742609dd497424d4eca6500eb99cec62d6cd569d7dd85d1f0f049faacacd489edb2d57b1220aa4e41b816496d86e0e03c8d5c7676bb0020cdffaf07e2785e8756774a1e76fd6ab3f293e110ad34ac544ee808981b1844bb5d653384fd5f5251a8d71fe4611fb4cf16268b279a8c116f0c1647c5e990e7f9e30b726f59e37b30e6e543eb4168effaf63b810bd0ae4de2c5f3de502ca797dcbfec2526bbb651f078a1c5dcf58fede67a966383702b45c828619005a9673fa24ec01041306383f58d514a52d8e4ddf97563e777f713bf4bb6c3739aebdbc56c5c515ae3dd41d8c7eedef109be532023199fa68b42250ec10e429592986bfb210489662108f9cbe88d6d74ebcbe38d980ce5094a77928c3b0edad147fb0db122152601994ab28fc546c49e864f1c7f164b5f7d8cf0c1b1d2b60df114fe15f6a2ade484bd5a4a408ca1ef8888eb401d4c52d5980a5322f2b7c9db93d013112af3cf709be2fc81eb3b59feaeceffda5df5345be9d74a7e43f57375a0a30ea9b5698e3483c84db4687a6dc55daf47a7b1e7fe126c316ba29e61c3b7acc6ce35f76d62aa394b1a068bd5432b003d1bb6bbc2e1ad69860d0e73234e6242d61b05d597447fc14496300ef5e7568bd54040a1881b6f9e6801a93b52008f6dd51958d9e7d0e6b42be0b6924027d58eaf36db9031744a688e1a9b96af98b4257b259fafbe7c2d94ed69962c2313116407db24c4f8c72fdee979bba376285626d155e67ce9ddf82da60a132c315474b3d93055ef8c34ce55e0768594de877898a6dff6f090af7ff6aa5ec683eb8c051a2190aef9fb558ffd49a31df0e59eac92662edb534fcb0fd211f8accf19fa2a796877556e79be2471878cbb4818983f5876ac85ac5ef57e4696842ef821a858f0b2d3a8f80711947de14f3ea494f7ba3bd45a57d87fd7ad2a4ea9ca39f4d90dc623c428f9f9f143bae68fc65acabd4bc31b5550620928153266d55f501c0a6e56a3e5dfc4fbcdcf976bc0a"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r1, 0xd000943d, &(0x7f0000110680)={0x3, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x7f, "80d816b282f178"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000111680)={0x0, 0x0, "78abaa106891ec98952476a1fe82df96b4f00c5030dd94ed1989eecdef254e65c37e5c2947264128d6ad6055a45826e717a21635d1fbd1102f46342648561a50598719a3634e8089f84c040a87c72c3286db56647c382dabc81ada9cba737ee62b56591e7cf2816625c3a08a47509806be4db58948084cfda6524703b9ff5330940166b2f7740d659256b02ced9d5b4a7130ae408a8dd7a7d40f63aac6d4965110b46fa873889d8d34e73285b82dcf380077bd3de7937ab377abf3b3b1aa226d50d2bee1f4b8dd42211e7438821ab3fb2131da1a504940bc0c6e95b1c50fae05c47041348f8ebb98688a911fd816a1180e589d906554da661e04b0d9c5da41f2", 
"d56ba570348f80637c4d0ac014db8a121c2520dc51c8e5306deb9fbbfe2348ca1866c123dcf93586227f0020b0ccf265199293a6cba1299e526d0eb5484711b22922e2cef23efc67f6b9ce60041eeafd89e7a51a108352a57b92e12f52fc8915989b6e1ec9c02764e308aea0421e52c74d0c2862d5fd408f9b2711e285b0263ba9a7c07809aba6c539c0c7a886f0a076cb15094249be52d44e4a34bba9c9952e99bd6ac353415f4563b43443f602e542a607472140e22047c15ca34692c2311221df1a012004acf42df43c626abd6654faff6769cba61cb8ba8cd1b093dc51239317922663263f7b9145040ad87807db2febf6a2d9fd64eaca3dddda46e43e4681ebb0e1952e316cfeb5d3e92b126fb4ff19e7c7e9973601dbf867c53fa95af4842d25bcbfee33621f113486910bcfd9449268c854b82c61fd3e3af909de0cfca4e795cdefa075f3f4684e5d265b3499ec19254e9a99ebada789a9c7d9afc9fb651bd33eaf35f9f4a3fd7277e44308da4676305c3853aff45309561363c17a7b0f734d5039825e9534f34df9fa27c65b0ca578d1a74fe0cd0d8fda18020e0943da1a96923a96c35c5b157666428420f9aad2eb158c6e1cfb00d7321a6f4d5c04be9889ad460382ab8e12f4aa04e078ba5b2a2b17a8724cb0ae2d476df66b23184b6f0dccfc225648a558e81b22c48b91ad20ef3afa728aea4266dc551a8de8af4eb78884d7c12feaa3f70d16dabda17c2e146564584021e41da9d8403afed151a8b7f72ae8080b6e83f5ab63e670837545cd6ee1c92e83e5282fc0394a024c0468d24d0130de4a4b3914df5308617b9c61f8c1fbecf4504eb9ceaa4050f75d0a04928a5add1bfd67fab24abea263c1e52086f72506956567df43b8719b2d0d214108990c556dd9d274532907e05626940da21a19f16321b00343a996400bd9f07eef736b0fa4b6d686dcf0de36ec0b932a7f34a7168d3b3eee3c45c962cc9ab99519f0b4d6b3d2ff03af787438c115a911e6fa5fab86931ed2a0f4795a2b1b21611ba234c24f37857076d6ce5d97e2ee2e54b1578b4ef6a493d91491e5b8f2254e18e3437b59b8ed9e76aad258ffcf8eb795d8c966f808f146473e9fd80bd0c35b4816866805354fd95a0b42fda052cc511021c2d005c0c6fb55a63f403894168cd7f3d10808a9152b2c3998b251dd7cef5f7527fe95c9148917ee263747a8fd8712da56291d614e048cb8d653c0ab5c23ec5973f1131cd86062788a67fd5846134542b7133af7857813c94a7d6b0751b3a9d0eec481ced710ae4d33e5ec52ca7ba26f09b96656161951a0d0f275a5c609a43d0e04fb525e6030a4c98effe771aefebee4dc1081df3c61b4290c6ba6e12fc575280885a826085ac2dc36b33d241d5f3734e2091cc8111f99efa1218627c7daec4a84a21c45154b8ed8dc66dbbd09c4e7eced4faa51ba0940561605b19df73a2e6fb19755b8c5d1911a20b5048e2641431ecce9a397033bf1428050c62bed875fbff9b5b5e37c21c802ae20b89d3b5fdc1de4274d661ee371c903cbc942377b12ee4392f3ec7c078253269b012c3378e34ce82e7f6c0c962dc8e4d8c3d712ed7633974b027ff8dcccf2d4241253141a16427bfd82a0ce0bc65ffbea729d1381270be597201fb4384d144dc96f6f2401aa00cc0fcd08ebf00155956189b4c33d9d5bfe0ab510e4c392f1c74f7faa5894fe1b2a02634c11abee462d50e508f8a524c47942776022f4d24f571a8de8e9c9d1cb96e2571c7a3261a3325c474ac63778289704ba1f0bbbb6dafa7a1205163b45fcee3b79a322052ac5c5e7f9063d3d6cfe79288d998d00d2a323b0bf46b6e8ef0423849759673d46c9db355e157521a0eea560077538f8b54c6e9c7228478fbc220f313b255952ef259a910fe6f2edb6686cdbcbe03f32755681f6f0185b1bbe8e47fb80916227dadb53f529b9c20c414a45f11fc5b44a3606d4b59c1f1f2973df38cc9bdbd9041fca9327f0636336dde7473cc37ef6754b06978f01fe7507532f148fad4b4ceed1ff662a465ecf9928a9969624571fd916399d3d5e10c79e94c7785f59b99c9c99e6b172aca396b108f02f47dac3af893555448596f84d3a5d37ff71c7f074ec3f4fba5e6c4191b9b7964d65ba68cbc76646908427c3ed3541328fee1ad371ac8ba850a0411ca954494af7409ad9d3c6ceb8c4ad59a705fb21490efa07cafb4129df0ec991ee0c2c11f78accaaa0b32a197c7c18597c74ac5682f8c2281995d2e7566ef4c33a0333d941c52b765071e82bcb8c754e18abdadaa3b7162c86ea7f898b5c3393e852898b70259db44563f341fa90588450fa052b0e34e8892300a58db7569be089a67fe96d95edaf17fca106cdeb4cc52cc24053c20e94a2e261673e4fd260d2387a87f597c2077b2953ce42f79a685779d6b3e611d8150231ee4da2fb12601471fe3dd12b137ade5196698f6e0ac3c6c1bdadf384edb1e7a9ce75e83040b623aa7a
754643cf1f917c29c1008c192ace91423cba84a61a6da558cd1dfba81a39eb008a913f3c44fefe97049d52f8d04a99b0a86f744c5a8dab84c10dfc08aab5514d29c09b8b865b2abecd9181fa9bb17ab8def7a7d9e1e824d214ce4cd8c4631fb3edd5ed86b2e133d44bade2d080fada754830c87c3ece99559badf1b3857b8c5618d853cc45b0ba87bd23ac3c1b7a4ad0418eaef4bdf61c83a9bd1d2fee6df9729bf4d16e3b5ec11cb6f2fb53c9c3281963a2f89ed013096aecc7a15efa33b81a79a8bf9dda75e215d6bebf45c2782d5339153d924299de502ea996edc421198618fd2b03f8dbb67b245376b0ae7f349a721d09f8ae56b855b5332396fb272af79a943ac0b12fdd1e1a7c7fc042b598b1cea55ad27c6bdd6afac067b75f899d2b6ae6501aca728d7f6f06ba38062b00899452099f95fe9b9b31ea4fc5a573e04bb8989ebb271d3d35ba1751819ea2038cab61bf0c5376cf17fe1d4cdf66dd43347074440d9349b3ec385b5ed100875054f51f64dca18653df60f4fd82129ca32fbf9227091617a1602454719e0d2d81877761e74ed40fabfcb19c3176cd0a8677e138187a9e5da9469d253a36886be2a6c81eec939755f394040c60f01603c65bf3a46724152085f196b75316ad13de92f41c60d6c96160629e611af2d9abcbb6dea93bda4788fcd2922839ee72141c1fb12631148c1bdc5c09dbaa11913826b483dfb2f7a8c6535a5c821e6f5c28ea6a4b5da2cf629391455c694ee0927a2f20a76567624ea6bfabacf25bcf0ed96050117cbe661dfb60da226b181b4d78293e5ac066847ee695042ba0f5138b66308e3e3c8650a3d0ecb31dea5708a4491d79d2ee30de9dcf9f91188a3676adf71f80eef41a5f7a39a280093404780aeb88dbe6345ba9da9f1d577db2edcdcdab4d4b9e33df8297c29c636e205c1b545e7baa4712ec6daafece798d3296cd2848c3646e8912fa63c230be15e9fefe99a81ecf7cb6a59e75cb293f41e6dde1ebf1790f8bbc20a5f3ae75209e876bd9923969f76b48e9a58784e81d854a9d366c247b0c448d9df9d1630ba690af58285b6c86b81383946d724e38924e052f73d5b95289567c7d820bb1313f21fde119a2144263130afa7d4545a968031cae1159bfce6935e5f8a9fc2df7ed5542e01ff1275f784e3d7fb642d477852e981bc79b82129b5c64b915db363c4a384e36f13ab65dcb2279fc378081ef062f9daa99bd3dfaeb1e2dc0ad4a7a3a9a527f4195ff4a238a663f0ee5d208b91287c46d0aa40b741d2defd4f1a8753b4e42cc789240aef605e43def06a0d5f7567ac0a2fbbe58ea260b061e40e47a16cc96da11fdaa6b7d33d9b396a64416af691a74bbfe86eafc14efe1924624c9ab89343f69ef7d8e2038a2b3c537bd940f9a741026780713c1e0585397bdb5338099bab6c16b41f39b5d455d083c567aaaa9cccbe5e1933e124fd6ff172cc265e23c9eaff646779c15452dbda093a390a3c7f775138e2c81a7378073e1c4dea6a747ca1e0e88bc2475ad1371c03f31f8008a168e3052340d46119e80510bd21e308161d8aaac00c136967b0e5b61dc2e9198cd56be93192520a828c5495fcb12c64c6f7d7e3d14ed5da25085900f540926e78dad4e4f6f5f9b340625a52be032e3db7da0e0ba83cd49cedd947452d83d7a67ef37df04f8713c35426a8e92d68d881540d2c84644dcd1bd153ce5bc0f840b2e56cc50bef5caec6b27f41311e0e697b89d1eedc5e90ec5e3484b8a6de64221283c210c369acbdabf6c81365df959f87dbad12c7bd783bd08f345c041bc83ac0ecb5184e9b404570ae758576bff8739bf93caca7f89b20e8e057b9e8cf7eca524237c90e137f0aba0024e63934118b1424f634ea6c89678137f69196948c6c42c9268547bac3830ba3bb06a91a33d9f90396eb78cbebbc65a9433edc45a0bb17f86904b7f65524b8c13f99167130d6db295af7060a75c8834b323428fa4747ac9681e8ed7fce3569388482e8c3c4cf67b296dcd3f77ced7e97285dce8ff57b7117499677b3d9669a53397a950881488b2b525291aba563d5eaf3fa20f13c694b8b3eb6c85843fc7efead6ea2f364a1e5dad98625009185453aa36d9e51999f91f7d0a2991d05e2bf3d31c3c33e3219163f5b4b34de491d0bffd5b87d632f0b0723d5ccdebd3c2a9c90f1285a5bbd24f43e8dc4f2f55313731bd7dfe9b82aef153334ec9506986c6fd6f98c1c356a276d9638d6b0e7058284d54c885202850b3255275dc56ba8977d0661627ce9cc95f7ee353d3956e4896761743edc10c4de66b5c1f4f4bcbd10a9c5aac5fb0293d47502f7dd6c21b7a98794ab38c34f7b7d56528a93e9de24f9309a795e720364bbc44e777a74bfb72916905488860853dd60c095ac0024dbee0766ac491af0630d919e91e46673d732de4b48ff9857e1aac550974b9433fdbfe6624f99f6e21da12043531288173e1e218757ae53e395c31ac0bfbe7d1b0a44d5b785
062ba17fc7577bf5dc5ac5e1a2c0987476f14715d88809bd5b9acbf21c6132a8cee5e64bb06f14505a311c7a57fbf06f5ce942ac600a610399b44f9419d5f25d08ede8119678f722013b35f2e690f48fa05a80677cab2fb70c306d94b1895ce8b257e7a20ce6df77ff0b3d978926a152758e35149d72bd0673c889ac6527dce6eaeec49460d7a36269ed8fdd9601018f838569ccf616bccbf5f0db3c71b9ac7ad46ce4da8f924c8775006335a621373c1fe30e7fa784fac3cf254832c2dcc1a6fda2b9e24cd107bc0e77fbd131875a95888ecbab8b05fa9359eacba880040dd45f60c93eccc420479d5609fc80fb289668843e39ba37a22c4082565ca756a61a307228b2fd85b19d93b5fa0888e"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000112680)={0x0, 0x0, "d4b416dfc8fb097ecbf25ceac66c277c267016076baa4442bd924c96a3152608a86f25b8dd5a3a15147111ff40132ed5c2669ed1e16c876bb7de2cfb6b2740703f41e0f35083200e8d41010171ccacc5d48707102886524ce3df085428298d82bebf417e08d85c6921fb5b2508310317d9855cf7d14eab0eeb7475dc3784b23c01ccdde8ab4112ccf93d375b523dc2d99d0d2855641ff872b62fef20a7ee894338f031388268d612d9081aa3028ce9fd9b46267cfb7da22adea42f003032302f8f9de30dee79091f5e7322c8e17ae08687a53f8829b8204343162f19f36e4c511d10648fc9adb93657794f0d51bc7ce37c3c6440ff67cda32544c57244cfa1ac", "216651065ab40cb61f7f5e3c70b70a04e3098cbf9b5a8888906e37fea1e580e2d94589aad67f7a81dc4b6bddeecf7046278741dbbbb93c5b6ab86a6494e22f7392597720d6827c8883c4d130682e7887bae78e5e91ed96239cb7a3303ed36fccc2bdfa9026a3ff55977c6a812e046d10eadf5581731b7bb9e4e608542c2900e58692720f4f021f48df8e8e57b7d3aa0b4dc275560e2c8cf5fc198b4966c21b05eae46119496568a7eb3c720aab2b1e5dd49e4439c50f4019085b8838c8574978ff7880af30f870d7d49918a1744cd151c66ccd674e219b6ddfd99baafa854540dfe292c16496a930e338279e58fd71d0f5fb5123f1f62ede271e4b38a7a76e602343f56b553f63dca211b525bafbd1efb75575ad3c9ba32faad10e5b45181872bda575971979653bf87939468abfb36b26f3b4c48cf368c4bb70cb3f06914a0f54f029c327eadf264a873739ebb72e449d0551f1cfad6689c2c7fbfe051c445850b5f00c2a844a81e909df2057aee976376c013c4db8ef96f9ada372299e2926ee45196b95e52c7bfaf714de1792c7d2c7f31adec62d2f1bb407cffe97f8b49e5cd0e383e44f2d8d66c9c6511dedc92b2d7137adfd296dbdcf15c9f4c96a6c9c65acfaae5bc06b66223a5ea0124db341494d248f9ba46e4c0f584c54a24129b9fd077f9a608b58f7bf1d9c744808ad9aa5ffdb04779385cad7d1c26bd7492eaa535dd263fdfa10d2e0b40848d3edcdea5ca0e61f49f96e630cd441e4b558dedede305f940cb0be6c1124b295e2eb58a09c76c6b9da2fa87f2e67c68a649b234234d4adea1c0542751577ecaebc43f58b0fbcbdcde96cb0516a1928e35c673f4faaee0baca5dc63850920036cb610212861dc748b66b20cdaef21088105d890c2bbc017d8bcc73acce2648a914299e551ad2f21d524d1409633645ca8c147055a2b3f13f16142b5b2df8538382d0f806e85ea2e75707a73ca440af904248db98be927015d50fd7f05393996f7f8377a577bbb2d6ec9d67a4f4c2a8e8ac912333ad3bc5c7391baec4ddb9db003284d4d767c735d8dc6ed8054104a660236640d987f06192569e03a70064adc6d96f5a60249979697723d13192d69ecd09f1c66cc63dc501618bad143fd2575b63086aaa44bab97adb6a81d9e45accfac1a272c38832e372a4f845f8cb856051f88d56b193d85f89ee04be36a9460b093552aa319ce0427120c4c4852631f3ee3de2e57f6a51f9c4848480c0d57177b4985ce83dfced7738d05cbed055328679240b943b7b66f85a3cb258cf53cada490238ca83941d7f349bf03f88a1e6a9e475fef8ba35f7d30e39ec7cc787adb843d557168d02476f758f9bbfc5cd4232a50b30d03f1f8ea644e15409378b2ddacbf96225d699d9526c354f65274466009a61c91e4fb2dee9a23a1c1d6a054ab9c48fbddebaa465ff741ddf47ae71dd79b84efff8f3a09b6b4164fd79d0a86d03ea5d703910caf4446f258dda336538b09fe1e535db01a876bebba8607e19c497a18f8593fa8acca46f8390e9fdcbbe22118b5c7c2d1375f33aebed54e375e7ac1dd68b780be7710e9f598f11dc41e9ca983812315ffb9b424710b7bb05526a503f33bbd0d352604b4a9f906b51d3209ac5e34dd6f1f5594914dbdae4b1b6c1ab3cb2d64fb788c635aa1a90e9aa26d295b61d41917b7f6ffa4dcb5705562a4
464efbaf9e3f35926b29d6bf014dd5997e473a4d769ac9ac99025d0b0695eb7edca42f2844d0bfb210c32116500c75dc3c6eee597b730a98be6a4a95948c85d9a309caed22eb0f3a41dffb665673b6268ce3c9577b4c719475d467bc50bfbff117dbc011a98b78620b130514cedcd7441cc837dc525b0dc7ddb310c8d32cef60e115d311528e316d77fafda5a7d4e37f4d4da36645cfbb1b09570de5ba63b02722e4577cc26041d851849c8ff84655813ae858d20fbaca560ea85b33ac0a4a2e0a6c48a0e8ef78dc09cb3d2cddbfae7b9c5b347ae56258e1bdb26b5116a4fbca01b6c6856015f34e8cbfe0d21d3a037272393ba1997d39fe84023bec8d792e021dbdb18c7732e6bc1e11b129fec2b882e22a2d290695e4552eb4711fcb0fa4ec15c7cc09d15568a11f270729f80be914e56fe5e86cfb872f37b48e46a345492281cd917b02b5b5e28c072a4e8d1fabf078ea69b978c54b8c6a247ec332cfee91abca383bbf4b71e88bcece22b8ad93e31b678fcd159b8463c6592945a7468eb4f0dc5e5a39bcf8a2eea107061b4873506a883164f54dbb20089b9bbe7c47298e1b95488052ef161b124b350260a18188c3a6d4570bfd6af78d67d3732d02cc25c0dfc05c631b690e9c000038d7e25dd537098b9cfe980a0047f485c433be5bf8f5b4538db98d0e2227b6eb79f1f35fddeb1a6b8e48324d52813b7db52054987e334fa1f32258f449e77a9b2c095297cbaca13fd4a277eb9be08f1d2dd954e95e86d665ea373bbbe90ad7ad77d4e842ad9973fca979f0aafc326de4b575147803c8b19d49e4e588eb89d95d9de045b3737ee2a1aa92baabb48668bd9aa4efc3f13e749a08f9ae57b6b640e37f1008686cd8e618aed589f80aeae9d9d4b8a42b70884b90d39ed44d9426e87079521d23cb23de35d9ce4455b70d39369b93a0221659347554500587fd000401f232326eb671e4edc4cdc3848bef7a0886c12092986383b87ee3845f750b927ba24f0546e8edf7bc12b770e347c1adc9d465c6433feb0b1add2a300681fb449a2c98163da945991b995d75a9187a359ca5dda21b81c142962eee890a9ed71dca76fb76d5d2668b21b0adecae02f143ad47365480a238f44227c41a53f5661ebbfbec88cf85b9ba504b4c8f1c799b7529c31ccfb244645ff95dbe6c3f37bc133f01f07886eb3c045236c61de30cedc4c6ea160f47d0302f6f19dd7976c9c63891bb5d5d71a61c6b9da09ddd03866b2760af8fb5f57d33a5efe9033d2a1731f112d1e0a1b810df6d33fdcf25048548b52e13bf9c40a498b559388e8326e1bf3888c3bb3e919a01c2eeee199c92f2217ec734c220c31981aa1fad103eac105abd9dc0335404ae00f351ce1967e826cf34d823e8deb937c44c52f5cfb013d1e8c9b3e41cd917adc71d70102ae6500e1c96031674c2a5583e8e1a6d887390bc64b037fc475e2adf28969aa22b59819706ab91f779c79c96b226e785cc16fb4b03f27c842a9429b8acc39140c0ce1b9aeaece2efb94881c53a0d480ad95bb2442f87ee1ab77bac9e5ecfee418f0bbfc0723ac9605688f7ba3adef7748fbb67cb8688f1cf3caddaf4726f041190c8155759e4656b1046bbf77d8156d5dc58c70d5e189f3497d1628ef1304e69daae3a5d8515a787e659a581c27db718e257118d5fa6520f58720fbe873a60ecf23a38ad497cd22cb37f30cf0e9bcbe79c7153483305fed73c2a8d9cff9958f436084896f9339255655ec58cf991c6f0a367fc0e0e3fae2b400ca9dd15fa25203a2467200b1e6546853fef70e23d71c016b1e350121e893648e279173d2b53e7310cff80366c36f81dd9f68e63e1147e278bce8c2a824de1fdc0b0898e6e399ff6af5dbdf8ccc7e14b6dd314c3c6a0bc70c64f16df84187b06f6f1c65f55a6a58082732d79cb2cccbdd2fd6ee1f4fedccb9aaa96de3db1c4d62c5b5d465c8fa4613ebe590e34d02823bd5013e8b497c82be3be7b7615df51f4e0cb75b2cecca364f9d1638287dcbb044a15c7eeea8d0dd519e5f670c41797006c44e18514b37e36c663fe522591681fa0433feef6533d666903fee5db38308925254c3bac3c68c5a0f7be7093b48d8d32a8c041ef835c32612079880c0573f71d6fa4fa2d8e9515a7ed44c43d536ec780b141926f33b3a46105265993e1208416d9f133d02b8a5da8a305cfd9524b570cfcfac15dd86326dd1c5a3ed42d694d2188f681b4af2ee50fb29dc103a536bfb6f775d6e064084f09814586acb0e4bb621b9a3233d7cba2c1d46cade39b8d3eab4f2f5579c22f19103ed8ddb0c8732ae7b126057b9729354c32b0416f352a8c72c20d842de25bba5c89c14b8bb8fabc4d75fcd67af45796477ea8675804999975a60582fad9a5fe163e00ea71a29437b1e87397dc2c1d1994da583139256d20f7277fc07c8943399d92ea8743f4ce16fa94b6aff207e3067bc15de34a474876441da3cd918119c18dae56d710c74c5ff0c099f08
f585e0a6cbea58f2f94eccc9c9e3121b24570ec26344ceb164cabe1d63f397bf25b25eedc0e4fa32bbf959fb5e6e88dffb8c8fcb0f1d70bcdb18f76e4a8fc9419520f6c6e73168dd12a92c812ae818747a45296e21ec1dd702ee54269940d6ffb019e2cb72a8c64ebf33623abd3fdd8a3f4ed7c8612e68a3619e470b7c28d5ba860f33c1991754413a9e54d207614071af9beb504d95d328f5132eb3dfc507cbdcdd80b1819828d126d04688f6238c37736651ee65d5e2d80d30a8f8a3c055a29a059287244b7cc173d5f66351fc96895b9f7b7c32bb895e2fefb997b556ddc4f20ea81f8f6a01e586b188b660d7d6a47bb33c7c4f63772c29b3b2941bacf719a35e61392f031ddb4cccc9674b7af847e9d2e78ef632e6aa7c79459117e2446d5307b37502587717c7f6c4c25162d2fcf30c18ff1dbf32adb5b49bac5012295c9677e69f36e616ac05b3ec11fa5eaec177584708ce8fb22934421b038b6a9af16ddbaf98426e2fe30dff566f4ab267ca2afa5ff32de831f460b784cfd9c07ce032d5195208ce1c0dd8271f16db184ea340c86d05873cf49727df607d517f7c9b648dfcd67a29a60d7c511e9e2b2d231e0c61de28c5e9f2e3ef62866832a946c9b42d926f2530a41c90e11b6dfba917e1d59b96a674a2168948b6847185b2545ffbfef43ea736d28ca0c8b6e5c89d99bbeaadb460aba422cd75228660063e3f75f9e517b456d5e90765c35b0cf473dca4a4033fd5193dba38048047d7fe84e8bb09bb6c3524894679014e78c80903ea0cf186a19c93aadcb664a9566e0dbc62037b2363c514fbe3d7124d28ed769cfb417e3280b17494672320cba2e82b9957a457680f12f32782bb67f11be4f6ad85bf587c9e12ed011f94bdcc0cca52d2719d8c89d820e8181afdedbab2d33e5290686cc2a0287f797ae716cf9559a69459d3c4478f5446f01a173044609b5579f3f6e09dfc1ec28800b8ebd087a7df022ce327ff2d40d18c28a21741ad33a8002376188bf5f38d9dd415bae7ec9786c163adf1095d3fbdc8f34127ad888bd2d633f3fbd74f31c47a5fc79fdf7c86bac05976896b5f35f8d485499dbbab7c939265b2ec5bc38d8a39bdf6481ebbfedd4a1666aa469a6410860f2e0e0af399d4950e344a75379ee0add77668bae63e2ed6c8ec0b999620f6a"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r1, 0xd000943d, &(0x7f0000113680)={0x7, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}], 0x0, "bfe018be7dd971"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000114680)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f0000114880)={0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f0000114a80)={0x0}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r1, 0xd000943e, &(0x7f0000114c80)={0x0, 0x0, "079be91e0619ad135a2db878543fe83bb4a3cfb584b464752ef3020b375b78dd8a818e8bf485c555ff3ee82714a0bd3aa7dbaec4fe8e62dee6f24ec9015f2f215754de313b6e10cf2f03afcd56e766e89e96a7eb695e3542aabee13b5f84f10a7e1b84282cc5a225764039c15c97ae51a6a50eeeb4c93d89f33fba219baba90947b5b22a1324f12d32890feb888f8e11c95d0ed8297083e24ccf9d234db20675c01c14dc1c55062fe9c9b7cdf3bf710b0db162dce592d8b0103d83fc1b1b94533870952208ab3f4895e074b822243f1294bf50082ec44a50e43d3bf2b4a0d4cbc00b63354b6b3da1d2362233188069d1a96215bda8a97f3731a3bc38b9da538b", 
"0868e5f49c02d63168e4da171c4e7ae1e5f7d5c08ce7fe3bc47c7059fa176b52ee0100eedeaa6ad63c2038b73d11f302f47b1b091ac07311b18cbacb8bbd90324bd2462a557b0ceb6bab739b0760629a6ddcf55e171dcf482fd041aa4502e41d9e7193ebf58b7a5134405f68c2b4b9c2a8cfb55224660b9a44c693ada176332569a0bed7e92ffcbfaafb0b826effb697f8c7a43cb5b4a3746e3513ecf38fd9f28d6d9ad18519914b18ab965c218ac8bc1c0ddefdf0abc1752de130c614dc100276b47708dc6009d7abbe10313ff84caf24ae62c70837e8dd4c77033dfe906d304161edde2f314c895a89b64a1d363b80a5452c456a9ec940e757e6e227fd5997c0b0b64ac3f6f839a7d83da370a65deafa16902b069d6f5f641e426ca1126376c5cdb052f1129023349d685b585124a5636953699d0e4ef0149e657055223c1833b565e16425f8f8812df1ed034bc3b5238806fe39de6640df23503fa3a34a81a0878a08b945d6479d6fb87d5ee9d299a153d60919e88e858990b83cf2434344b62d950b52624f32afe483cd557c25f724f7029dbddc7dc6c00a2a2ddac2dde7d266d7736cb86471ca569bfb8a8a37c55ceba2dd7b43fbcdaa9c9da0da3d0ff0e8e2af047431e582d2c590826cb9ebfeb73ff100fc1b1b2b7357671a79f4ab76c349a08ffa9247d466dc0615075b9e465ab2e4cf2affcf70d70c8f2c0a394b189dfa4c5c2f33559733a6c9f1efd860367d5389675b007af455c36d91dbe13c80f3a556d9c1e6122882895e550160626ba4891800372f4e82d8d571331b64b55af0cea6f43bd43082099507361ab738fac1dc349d1207e130b15c817c4d6352cc1d087ca300100fce25b93a6d8d1d842f79764717f0b7809ac40dc8b295c93bc09aa241b4b384bfcba6c148f54ee64637bfd137b0cbc57401bf69b00e8be6af70c96b225718c9c7e97134a5328d3d5d4bc4c7c5ef913c0094410c00e1fd6ea7d637f0d69869f6942f0d09055f5594ffb7c09e34d46a7fcb2949667231f89fdf549f7516db30f8dbc6267fd9d6192f8ba33d688655449e06b50979c8f9a4e14d2bb3f58369e9bb4ae71b6e68b1b6d3f567267bf83c08f0a98378646ddecd00dbd6eaf45b65c864d826b366f4ac8a2891602e556d5e10aac45fa085c65ce56e91a4cd9267483284b9a80c8e1f8e6957e45aa01d141093051417daef772f34734ff8aa7827f7940b418908effff223aa4e4ce3ae348341706f74ed6725dc7ff19349fd29c9392a81dbb4372c692104b96f9f29ed0bdfc20274d3690b329d188afe79ed25b29bd5c3bef62fb156b45a4f9bd2f8d06a5c357f0aa60cc27348d5d7befdd24ed07e3467839e03f98a5ecacf0749290ef9f080f87c1dc0c545f7a399d8b5e555aa8737bab27a0ecc34c8ae1d96044ca113d4b7adbf9cd051acb499f7175d0a15dbf20ea90fd10ca5745383dca7aa1656d2ffade488f484b80f43eec6c3de49d57f6d6dee737c4809e6d998b8ff9215dcf55781e38f4c7d1a816da0359f54aacfe468f0587e2a3ba69bcdb884b5eb56b262914701f490b49cba2ac8b5708155ec7a4344e89c48bea4e0670894aacc4b34df82f80f00974d7581dad3602b41f23987f56a5498f2fd4cbd263ce25d38d3152a01687804b389a85f8585b913606dd0e246b4f3857d6871424c555085c05521c2c5a3b7f36f52b02ea417ac1bb9da75746a6c454e2f60462f01665a0637741a66c136b50147abccac1f280690a2eb6bc3ad743fc323df08bb94dde9d198861c05764cacbb72e4bedbbfaf5d8e60706ef82c9e7e9630d075843af1f642db25d0046d2e0f3e51de0c8772c22bb2de9aedee3cf6411496fd891864d7ed7d847d7eaaeaec3917bce2bb3bdf79770d9e082cb825505ca9b6cb57f09a9d2bd9ddbc30242906f70e88a404a03faf06af69e0da1b4b4d91e910fe1c4d71febe1c96fd4d298334ed6c15ac2b82ae67e36b95c1a799a7e6d135b3fd41210b0364c15a96609798a02eb33877c4a9a295ce862aca3b5e5c8521201042a937c9dc6a14b47e82e0764bc90b5f6ddfa177c0a4492f387c1956d1e541fc5f51a2524f65f9ff670ff8b5dbdcba3c978523aef2faa404613d539042116a397414eeb0d897a5ba8ff64a86afe8132d2e12a6d41fdc63bebd39b8db390aa2ea243b72360f4a308731dacb2eaba825a2f243be5912b0f4152af64f39d4b2ad414a686b5242ca74dbec93dd6f34725fd55c4381bbfeb0a262d92805e2a59fb426891684e9fd107c593f1b006fd30ff97908780c8a92b589e4f2982333d216c91a2e1857241c56b0c6c0b3dac830d6866dc90e5f3b37c19d666c7dba9771a869b51e93051a6767768071b6ff1b8a761db3ce7eafc241a2b96b297bc888f01f59eedf129262274bbadddc2dcda65e7ab2d4adfdd4007e4dec69a0f8c5245ee711c9034e47a90c6b71956418fe1166d27d8e72e02c75b27aa7ec38ee04620973ceb78854ac60914f32771a
8c3ebc0847c5f3e2d399746aecf8138a723cff7935667b9f715ffb25a49db8df80ff5c932d7af49b52e08ae32b030bfca5453773f9d885e9214424b419e8682f90b786eed6fa580ef7879358d26c5f47af4152aff8963fdd17d75fa5cf49287fdf9e528e601299e465d633478b71edc838479fed693fc9ee8cb2fe80d490afb4f60552421067a726b58b1a4fce344ab4aff0d10a456c84f7e2733729b5451ae51768a7d189654c715a727f9274057fad2adb4f77342de80bbf194ded1a7f759c4ea4cf80fea7fef280524168d6b49ab8f8781a37478ab1f0fa6ed3be9be4eab7ca7cf912cbfcbdf585fcaf8d7700323cfdc0e5e29c857ad094ec2fda73d43d4dd087a591bdac467b32f7422e1779fdb278e9c4dd2858079d1a9c76de7fa076956be888c88494d45b40e8209f6ed4d635f8d7d041d42ada463ea42fefdd2a6a1af55b20142d356f0a0dd0371e581d4aa68321fa97580f6e6dfca7602bfd8dfc559236f13bc225c45183ecf73c088d066238bbd85d314b36d5579ce67d7812a2c79367f347120c320dace6c4ae75e9bba5042fda04ef52b2109108cde4c3aa21de911e24446a1bc8c7799d71e7cb181f43eaf90d4785afee63f5446f8f0ecd459bae99ae5f70ad35611749ab0277ff720ec08177e64919d6164aa46f97189d744e92c1a9f1beb554bf5036a6955b0151677262eda27c811b50faa0641604f872e83b184a5c004f54f980562dd293dffdb9165977c4ebab8c590e941e8de8f5562a7f1e473e90fd0504721a1bc4cbe9416f5b90476fd94e9822a2e9f48b100338ee340a3da2be639cba72b457a996c963591f586594e2e5bec3b76d618650cf37d6e53f29425c4cf2420770124d2311176f9291af26a13dc436b17f119bd9c16d8761944e06d726e789b5e17f1181830f480ccbdaec1f98f65351d19ade96de194c5d8a1eb0ff4861a9982560a05023f1d11fe3cd1cdaaaf8e09fd8434030f497ffe13d886d5a1f94543a298bfa00fb967de34d87189f3a9cc11316ca809326b4681412c3884df1408ba164f33dcc6351243e64ccef5e42ac3632875f5bdb76e668cf19337707c9f275c37932d81b83a59cc38b907e89e95b1313cd434852097b4b5ee5d9766be99d0cac4ffcf44283ad4ea39a568fc941eeff7345d9b6a9b5a00beb55a2136c97431e1f52776c8c434035ae19edd928393430a7a28eca7ed41633e0c4e922412961f0bafa9b79004d82b0c0a91d529a64ea0cdf7e4f91ee99a682581a5934bb7599041fd11e7f6faff7665e31d62330b26c252de5d15c3bec9d04a42ddf08228ea3032489eff2fbe1859c6d582276c06db65d51f8505e2d4d798307101e8f429ffc7ccc96144cce3756d0baba9abab990f4ebf020a7030da3c329e4d6959a25d14748a8d8c43f694eccb898ee17b3db0001d2936f48b7e8ee41e34263a8360a260925d4a36f2a74fab9d162dc61b926f52ce8b9320940a6601f533572ea012cbe4a735dbfcfbb48c4173bcdd5cc238f4200e8b11be61592b7d140104c8b14441d003e3519da34b8fa1c126ed9f71f9487e83ebe83687b64fe2b7c4ef975ed297d33b486d57521a97b8d2c8d5888d5cecd826f85d888836e577802ce08e7ae4f9f685c26fa421b7a2f0901129f8a338254983cee145c2f83a79d2b709cd49587cd6ddfd9848e68269f45bd5c09c1c2eee1525a70850f40c232b76adc979b7fc2b2b0bdce6346746f44bc0b15b737c0cd06d5adfbde32ee0d3bbeb76409c072e0a019aacc703128871cecd0f5bfc3b4321b24991a42b65b1e829b2a42b69f16fc3ca999f7f40f99dadabf61c7108082e10307a9fe17e4a01f0cfa9b0eeb4abbd8bb4a6612452d0a3c3cde02a0bd8a8a3cc8805e89283f62e2fc2d2c4e905e1a15d0cda24ca559b278e4db10a4dc3ce956f15433c69575b5ab9e63ec2ca150fee887fdabb8f05b84ff099bef08df78ffd685904781ca1ec72b25c7eb1267cff4be6cda2fbe82cdca061ba51b9c2597b1ef3d9fdba2ffb1ebd01b3bf557b5b3f7cac672b78e6580e873c4dd4c455e75fdcebbfe1482684f0d3c3dd22681462c02ec50d10c550cfe108e44e37813fae935e3acbe8f3c1ef5340e16c714266febf236670bf47e1a5ea19d04a11de93a2cf1244dc6478f9ecc6d15252190fdd5c8e543d51f49220935b23ca31f0983642d5f84b29afa9247533ce7545ec771e6a0c221dd80d4401364b3d776dcb555a8a5034ccda63b87b9493baeff66c3d83557af5b831128364d68a9b1afb4c51b6544391a17a26111955c41a1bac71e744c134610601628d735bfe9b06570c5c88d5a77f41470b6dec13102e51e5cd161f3e0a4bd20f8ee075086668c426194c1df8a5f36be548b4aa5975f82d85bb668283adccef07cd5f57d3469e6ed2da9ce4ed4487eb204313af7e0e4e79e402b45ba4b9f7a026f84eaf889f989ca6a607f0a1bfad06fbf57e4dd8f6d670646b180fba838d54b09e5897f3970641718dab4b
0a26d46cfa3fa08db4ae2493b3012b9becfb4be912da0cc1e06f30d5ae27d9abe97b7c6953090ce3249f7e5f40dfba01ee3ef123470695ed9bcc280a35f737ea9a76ec9f68a7cb2720171ee962e40be151b86a4667773abb9e2d0579acf5c839bea89cb801d2b6beda9862c4dd9f77cba7c4175c3b09956202f371b601b8e2ff4cef44a180a1540bcd5dff5aa007eb1134832323e7b82fc0bcc6d3fc320fd8eec7d1c6c3e7e811646b36862b96faf251ff99f2f0461a031a35810435ee90092a41190cd2324426a6a33df0849d95a4b1f4cbccf7c5ebc22967bdfed828e480bbdba21240b49f04f08bfb41e50b67dc74effda67ab9e7ed9b2a796880d21e3d718ad98767527ace43a6a0a8ca00c"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r1, 0xd000943d, &(0x7f0000115c80)={0xfffffffffffffff8, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}], 0x3f, "a9dc0978c38a1a"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r1, 0xd000943d, &(0x7f0000116c80)={0xff, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x1, "2edfd1a840a5af"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f0000117c80)={0xfffffffffffffffc, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r2}, {0x0, r3}, {r4, r5}, {r6, r7}, {r8, r9}, {r10}, {r11, r12}, {r13}, {0x0, r14}, {r15, r16}, {}, {r17}, {r18, r19}, {r20, r21}], 0x1f, "e49d06d39c6286"}) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f00000000c0)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000040)) setsockopt$inet_mtu(0xffffffffffffffff, 0x0, 0xa, &(0x7f0000000080)=0x1, 0x4) accept$inet(r1, 0x0, &(0x7f0000000100)) 00:20:10 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, 
@unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x74]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:10 executing program 4: geteuid() r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) geteuid() (async) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) 00:20:10 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:10 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xd00, &(0x7f0000001440)) 00:20:10 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x401c5820, &(0x7f0000001440)) 00:20:10 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x7a]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:10 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0xe8]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:10 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$sock_SIOCGIFINDEX_80211(r1, 0x8933, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000108280)={0x0, 0x0, "689f66414a9e75ee1c96e253d204806aac1cd23471360d8c948ab93ef113c95ac72e74b1a9f642761876a74d0c1d5af75c4a8d5dfd81c5a16f4e1393d26d340efa048766e47e744b3faaf67f179886f580fc32f08b7eafa7d989ed8b83040bf96b549673143952bc605c15188fa11aa0aa433d0ba4b050dc36cdcf1ec1a916962c3e11bcb1ccd3b2025e0a312a0f196de0d663f946b60236f432f86a1c597bf3b3ee462a85814c273f33dbc7ceca3ba7f3dcd7676f96d8285db5da0fdb1ab342ed68c938bc1c14db6c890dda9f9bfda9177dc898f05124719786cf099a67b3edbca9f560be25558ddde1fcf20c8470bca5e6ead62048703e58e1e4ff04478dea", 
"9775dc7cfd6b20890a680bbbb7e5ef0c2857200c4ec2cb8aaee552e1e45459992bba97e0b38ae38e3f07f78a67e5930556378519a6f9b2f22eaf8a3d938ad12dbd88f40983557f82f8157472c1b5145ee829d37524cd25eb126dac11a17bfb24818c182a9a4f291324b9d145b970af10283c54c4fcea0ed4f81c16a5fe05e8fce9138421b69d0b37819196ba361227752c4a84048a67c757ce1a14bec897f864c2b9c168393fe29647f8ce433167ec3ac8c181690384979163b9893130acee1d8ccaebd6abec5b670ec4d7986c26d0130f67729a27deb6f324927537fb64017be0424edda2c73ec356110822797391aec76430eb10202fbcff4a3084a08ea2ddb8b830e23588feac0a13fa1e9417e7d407d2c2cd57a870200251aab64399e96d2f393478eb1b32b312b27ac0fe32069b11bd83b1853b739fe27c2f8ba7d05fd249446874a062fe5947d4f07a636d16e682fac16cbb972bd406a6d1d6db601c55a8869fc291bbb078bd17ee16bebd4d2f96b79aa4bf2409680537822c0de8e460ba1e7fcbdc73925ce015770d02489346f96ad8269004b19f0aeec6a5b53a9a8d59abbed043355e466e67a0093fc4c9e8f040686841c22c6e1a4b04c7af86fd5f4e02143eca5f99b02105d42adc74eaf9c23c1ac70d78bb7a29b7da6dc5aa378dc4dc3b259c86e9e81e33ace318d2c8fac1964585d1d5c0a9dfb08dfbd668b9bb3348b1582a44fbf78bca85d8787c66094d4e915940ea620e8824c63d0bfce01f9023e69e39e0b64bbf9e4e53cdc062815d805ea60f72eb3b4e7a66687ca72766593612118e663765ac67eceb318dba35f392b4a7cd8147fe5d8f62db266004ae5c5f77bfd7dc20363c126288bccb6c45adc993ee845c465f5e10f9d26b175678e92e984d2fa3b2486ffcc1ea11a1ed3b973f3726c48e65c10b6b520a97d47650cfee745c7b1087a9b47e60875bc99982b72c20e39e692071b41b7941853ae94a74086b02b86bd65027eb91bd385e96269ca2c61028d83391cd37abd32fc570d30d465ed06419af3c0bc0721180ed2064d849e0c58c64ddb80232f2747a10368ab383ff7ec4b46b07a2adf3be63c2a2827db4b2b4dcc2d55fb115fcb685679af3d438253d459b5271d35fbd3363dc0ff08703be0e7df0d4dadd35d549b7ce3c43c4ac5e006e4d35414d579cdfdb0d56eecb89212331be2337afdcec8c7ad36b72ee2374e92fcc2d521258aef6f20d350b801e2dce6747fc2387e6c8a522a5389c7ee1417f09b7105911d8d0f70571b267356ddf1e88a24295c49e8769ba7073367f00b4d789c3e8ad95fce5b677aebaf94a905a30f3a7e48e5085553ce8781f410957efb29f4ee2de88a23237ffd36980020ead2a27c1d4db88aa189ea51dbe51441b6ea383ba3414a64444ae1bc30b6a3ee56cefd516b8b92ad165274c1a7a790f961923fbebaa4b1fa33c313943f827cb1ceb4141f1228553a675a3446122f08491f27c147672f664853fd82a6f605c30ee0033b501167b28aecd83dd9a6ad4ccc2295a9466d8f20e8a19ed1b7fc8feda093230d52ef714f2db53724794c989377de14c672db6ae0135b56409e1dcb8c1807db5d492dab677d995baf3d22cc52f67bd67af1d93bcc1d63995668427961ddc8850f1f4f35fd377404c9f2cd80c9e487e6e84c007effcb18ab8f774a666c934a78159d1c2463cf8d70b167c5c1658c14f320c14dd21a0b20e110755bdcc0b9c45730ee5ad40ee81759d4f554c53ae9b5cdccdda0a43d84a94293f935956917b423232e97b407ff94fdc829213da59b242e8ff5dbf6c388b59c0f1e59d32a6307ab78c1b5c5f6798e4c26c238e6d2f361fa4ad17424a71e464ad01db87be934aa21a554708402e10586f9d9a426ea38ebf644c653d213c9bbf4e70fdfe421dac88b5be721edb2400ff93fd520a7c81d988f6e4413599c2e699b635e917463ee932be0e3d8d67c3fdeb3acba3d5b0f0e6bdc3502703ad15350cf99703c3b3c9ed753a5783f870f29620f163a2c8ec427bddf69e273641b66f561c59f51ba395c06bb02da7f7b7cbe83d058c62e6948a3d1ed74d8723d49eebefc9194b88dd35858fb77a09376508685730b740f13cae4c1b4f2a1f3ebe19bf797a0f02bd82c18eb9e47e106fcfe260b34e9ab2cad8564553cbf51a96113d64afc78a3c8b5d6bb4e3358fe4eb0ff28dc841381027a68a95e219066ef9e40199d0e395d6a4907bdd61476681f3cbf489820acbc02cccda01ed26d58a5baf8472be7432a90581530f7e957a041e68f3a1c20c68eb770ed937d083a069be3e7e613138d7f679d5996d1bc9e807c6893f3889de1857da9bd8306546358133f2ec03d615a92c00059ff01a72c466a32c5be3b8ea9301d78f76f2480aa0307506fb0b843c702ca0a2f574abb324953c281c09e29db0227e5220770cd2dee356f8ec814d46e7681aa81407f2f369283d9aad34a5e29c4511b745f2ec3fd50dc6f83c42febdb5ded60e19
369c0d441e7e11355a0b18a7e70e5cca40fd3ea586ff6c806c19aac34118930dfec7ac9cb760d3091470c643e753e41fbbe6ef496d058cc2009fcf96f6dc3242cf08f1feb4afe86677371f39e129522bd8b92f2170c4cc1588b3c29d2416bd471d331f0f50cbdaa8d44f400645845e3396f77df23a7157f44d62b3f2051e1d3e53ee5d9ffdc825d763239d5c6563a90cb811c8e12a52386da4e68ab9a0166ee57a4bbb8d3dc16202457274b0ec9aa3945f5567d80288dfc71b3129e79244f53cfea8077cbf0b9a07fa9f818e7aa9217b7e6b90f45a2efe0eb6c01787c769bc3b52361b0f6c4a1af6d8bb19eabc190b8fd2ca431a6dd553d0b92d3ce55ff9daa5bb2c44c2cd7eb98f5c3362607dca198af62c3b347a5f34818c76650dc28ba41c36f02ae69c3ecb55be72a50daad6a2d02cd51534c08bffbef8dd8912cbc58969af7261f980d30df51a31561088328a7e5e1f33103d72c8a8dff14622f764eae1e5bd38409140a62cc9d74e7467fd6d039be8293aa662083f589bda7d92a3ef1317cb8761de4a63e5bc8e9bc319aba8b3216cf5cc99e2cabea746b87300f6df876dff7749d5f546c2f0aeb14ae66cf16fd3a02045e8ec46d5c0baa41db66deeb312f4d06cf76a7c808a289a145fa11d2c0c8249703afe8afb383377770850428e96475398bbe36e379794d86f616341eda1986b9d435e371dcded6446ebc2b9fca9448faabd99313788a3c13dbcee70cdedc5cd7537d479c51398a48e001fc08e36a951d071a37b3e642927888c15ac1c2909cf0d3c952b4ca305590e2752ebaac31f3f3d715b393363db7ace23830b7b56ee476fc0790f4ff3d7e634e6964bc01b6c63528f4fa822b3f8982e30532e2ade2362be40fec01346749a908dfa0a6bcb4885e09b7a6c1caf1fdec1d076c3aafa0715f4676a0c63848c2591a04cee78bc126ba97b814841f5c7809a9d5322b7409d21e80ea10bce75b24cd561a4b40ea3f3b07110c3951b2400c1eaa115a184ab90e70bdd4e5fc9e33d1a9f337ddf701b6a2146395fd5ed1ed68fdcdbb25cfaa05f297eeaf350121d224e7985cf48bb385c26a85fa6607d8f419e50e18f561852101d91339c614d68b28b5481b22b6e75f5428d80d1db746494271f38229b3ef9c69eabff6fb232fe7cb1eff14950ff5244183b57568c0307bdf0162832e1bd750739ac13c42bf261a0c0dac8fe28c6a89bb015ed79086b82553764ff002042ed56a1deb1c4d1359819055bbad470641b0809fefd0b547f85eb81456630cb3640e07c456c9a3965fa3831aa8759e0721d5b0096c8745f2695db9cadc32bcb4d97ef23a91b432928323ce978fe8834077479ddb328b5ced5a6c427b3d2cea74ff6cd2d56d31802eeb6fe7d538d9128b85032f1c55541331c587e01344bdb3000dd1036ba28031e7c34d74a8fd3ba39cafa17a9fe7430b640c0cf6d34e5763f1c5c59a20f94a67b2137335e54bed042680d237d14fcaf1deb7502c6eaf59fde38f1be657d8b0acd14a7db0c306f7ab032864d5f5a8f6da749c6d7eb12523337f298bde25c9a8f7359f16c03dbd8e12f8b0204b1f74645fbb39622dd7dc938cb168c7510e8ea673e1b0427e43884bf8e57ec3e2712812d02355fbfa1927066f0996deb1645942858f784dffaa99751db8f55801eeda0a5ce9d407507542d92d5376164cbc00fd7c353fe7c0f83440fece7bafb42b3f8c466dd20cb6b19cb2db3132bd6381db2e9c8154a4154b4aedd3a72977caec8d13f35b10d95c04eca503c22bf6627583062a4b8a97d0fa2813a54ec1348b9b23c71441443b472e8e2f6717a671cffb7a07fff2bf30da4cfd89a85e30f0ccab2d32ac19f22f81d00695f55eef9f42e5a625b78002990a2f967d40ff9a212059edc60f8ebeb36b8e7e2cc755191afcb494ee07fc46c145171e29aef6f4fbe664103fa0c029a8fbde46a2efa819c1f96e493a793646522765f2ff4a7400bce0291bac5dba3b02b461c5a1ffd881e8aea2682335f01b210ae93dcc16543de86276d9c202b60b474f522dc790e50968f2c32b2cb982f3d004fcf5356597dd21279b839184346a836e00e836551d93e5bd9f377cafafef7262625ee3b92cf1b34d5352906684e01cc86bae57a137b5313c11a630aa42fe93f3f47a8abc3597ec0fcefcd90893a93452bd3cd4c87ebdc37f1d5f6669a76502d2c717017fee5f345d0286c6f55a6f01ac0779d80e97cb03d639ecbd518779d9f4cab929218a521eaba849b9677d17f2ac4edd598d5ec0ad9cd2ea2fbdc2cc335c68ca11c966f683aae3f7a04214094da4837e821b74bc9f1cefbe77e1c4336f9e47bb5ad9707a419339294b19308672c0d6bfe0e2c98ff714b8e9c87da827fbc5bb5960b5184d57de47994297d26d99d2300c613b3fe00c0e3c1fd0f791cccb850cd030b04105548867378ae7f2f511a7e18071e6de5f3e2fb63207833476d8d6e65ce12bc3d18656e5b217a0059bd98b9968bdfd500ba3
17a695d81feed2dfb751e0cc00969fb710161c031a7e5a4f0946dda7de225bd49800cc790e387b6a3c96fe2ba3341e71eeaea9a5c44bb42bc47f89e232a29bc2b62821135859fe744cf6f54d8f281f85e0635de0087250d5dee16fbceb4d80b4843eae6404e204e8fcd46272984e835c38c0a9fe60f20e0351da9821397fe8c68f24fbf7a30144d38a921d16d05c65893a4a125a81d3e3c54f94edb0a9b90fbd27496ed0cb2bc990a2675400e08ceab041028ee9473c5b938a43e820b5ce333a1c7325914eeea3a2a0f059bb58303bc6701e7bc091213ac8a9dbafcfd8ca12feb2c1a1a0b13e551c87aae0caf5f79b4f673b529fc1ce473b3a9c885cc9adac7028cf18e817bb2696c2c9557f122"}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000109280)={0x0, 0x0, "b09c5b7388b1aee4ee684681290554dddfe0e55a1acf23e899941e7de12d47280913bde67304589c6473f13b4e7b212ac121b7d7a4241f27b83ca3e1ceea76dd0e22789558b12791f5dbbfd691d6bf23a24e4ffa13217ce3c501bb21ae3d2c1876f99b056ffae5a576905b519d3ed7aee6b1e07a00ae418fe2c7a12588f82a43c6ed624b5b97e1fc387ec157cf24b6c6d94706d8bd254575988479a0e40bd7c6a45dc918fe60e1c2d1c71c30274aa0ed362698afed0838eeb4ff5c0ce34c3ff4a1b4ef8660bc86c3511349eb625cc51c9a51e38e3650f602952f68cf9ada9eb09bedcffe057b504a32a97b1d028c5a516f97dc47b5fbcc869a39b71b11f05b15", "6f1c068c032f7147590042809255e76b9766a6071d9be2763d1764b36884c8a938d87433e1578b4bfb04a0dfb3dd50010cc9901a6aa60738a479438dbfbc80f7c39744db7a65de0c4f9020f6f312f0abf96d081dbe3f431923be568d59af0b5216f0eb887045dc5346ae69a20d6f3054457d7232d2042dc74ed4c0211609bb310495f96d1265462cde162d32d835963e210ff1644a185d475f43adb918c9a39fa343810b71fbb13cd54d9cefc8b97dbe17742e841377e94e08ceccc565aec6e8eb9e67514a6762f579bbadff14df8764b983520b3f6559a5a8a966408ad578a2ce2447ebcfc7deab1d215f4ea41b7c307751af5c93207397fee84f8f5cc397f11a094764834ea950b592e3b190529d19f4d861846642a2e9ed56e45e11fbf9e52ccfd7cf70bf318c78d3c93367c1dd9c1b400b7576425bc2b6dff15c0f23b22a2a3dc1b7228e20a96085d1063e58aad5998c5da43113f4806228ff26a78f6e6433aa1ced0c2786c5432912336d3994666de613d3210563593473f592ac202c17c749d50f9746f66815caf6eae338c093d529b420254e0928fac88c0a41fbae2b97d337825926d734d93502d05c67189eb2c4c065d073b7d2ea1379b466c255832e1259d4faa3d23aeb47a67bac168f40e99adff7341209877e71e6f1c60141d3c051654e43be26b074cc5f2c19d1db505ff36abc8454417e8f28f825f201e4952fd833712e5a3ca3be49883dc5835c47de2342361882aab53401d1d85244422479b05e24d5798c8d4c4a1b01f862de871b7cc5da659f280126537179cf5591dca2979156a720be78c4433943cca600f9e0260947515b463cbb50e0e72c51ad19f19f1960d4a25a4e80a4a8927c8efb05cadab23988ef9dc3dca7dd7bb736e3cc3ee21af52aac505e5ed452a970fc2139e1586440b59e7e9ae25957b5a7642f842b1a052e86035a58b91bbf7f1d32a6a8999ff0f5c29d3ffbf22fc35c7f67b92f8e9e4c30aaadc6fd8054b5872496169b00a0442bd8286b200fac69bd84674794d6e207f15d7a3fe79eaba3be275c4062e2b2b5b067a10586f3218578f5bc717e93e1fedbb8359016cc2c18c1df70c0fb095e5d46e918a845728b0eedeb8c6b47e546758d9e7b782371e101e6b7daa9fca7cbbc9e8a2e1e3436b28aa6d67105adac636895a1434a452386a116944b86092d52f424761bb7bc6014d3960763981a03278a8c96fbacfbcf9f01703ac96aa2f152237fa2768c48a46503b98e757428d5a2aa22f9ba7b1596a5db443d95d3fe0d49579138856e073e503161cc22274c76a20f87ced648df1fb45f960e5139c81088be36427a84bd81b942660f8c96da52b58cfd1c486282240562319513cd2716a2716d350e06fb7076e761a751addee4d6b0db72cf9181eb06974cf544fa1dbd8a022c4066a33aaf2ab6e5b9d588d73619350ed63ed5dfbcf6f2e1df0e5a24435446cf4e6f087b2a46e17e8335f3ee0d633258eabb52938c1807ba037bcd55e2c43ea9a0a702f69457d3c7e8f9ad694d0d1aff5cb1df930096dea378b2c164a11620ede9e3c2d96e1cac941879e1b5e261c4237eb2be7605e951981963aa773e455ea20f40b1e0fca17754b47cede4525077f9f82e03d892c066fc1c6cfd90a59a7c10c121bba269081faa9ace7175969ea0ffb1f6fd334b7893fd08cc1c077380ea60e2
1513042f009b8ce7bc6b2b6829e992d99dc80461294bbef50a9161b505ee15edb90b3e2a01801a6778e00c0930989b1a856c65d0c31f585dc98a8f364916657af06973da42bb3eb024c92638c2b9882f9abf5c556faad417607d2176270ef7ac153ac80e7f527249993de9c28f84f17c156c89d0dfcd05159ad0f48f528f0b8055dc6240f5d68b224b0fa337e9b69b24df8ea1a13816fb24285c901c02bd81a16137d3b5860ed460ea315546a0f1d8227afa8e0687dd8b04d875ea133fa7ad3b26f7310214d63d2585ab8fbeb3d2fc1d876422c2fd872e2f5e7895b00eee022817633a8b1d663d6d221aa9f30d301e530b66df280b1eb4f620d89fc78ac2161a68b25e481e0e9a61a0e71f2ffd0c815c8f10e2a33170d556515e5d7d8ba98b0865e56c6a64e3d934693266036028e7d068cfa2e40546beaf3b77ed5660c1d021898dabb50124ec7b8516013246d0e53cc3cf89273ad8ca90eaeddaaef6f9fc02b358401598d5b960ae3bd0d603242b3f0e985fcd71b1e88517c0548b34fdbaff8310ddffb8417557c378b5ca3f2d01da11d7c768d741cbd01d69cc2e3a15f602c7193ecc581dd0d0acff92aa511193c07641f4c9a767eee2bb0f74a98d7040482af624d4e70561c0f8c7824e469792698eba844a09cedab8f2da8d54fc5d72a3207ef97a96c56f28ddc8c5a2cb0e4ab307aa63d06ac1dff02e87bbc013fa1a0116e5d5fbd49f362dd7d6ba008bad40c8d9dfb84799b727fe0b1726648da01cc10d700d831a8625524436b0adf585a1ba0c3e782dd160004632fad8017f70e9098c0b38aa04b0e18c21b00e8bf65f73c61ebdd82348ff2cb8978c89811ca6b3fa4fc7ff89c3c6c086fb1555049789d399cbb8b9696157cbc4f20ff1d6c51911e44a4dc2614e8b19616623c98423d0183f6ce8e0a01f109f3ee252affee1a5a8f24485b10a2415f2f09943a18fe8c24591c2236eae762c35304ece76b9b948bda9fb3f4f731c19b18720a4d2042e7eed55b5b4c703a2ec1bfb91ec56d5dd5118e746f0fb792094f026c79511760b98815132c120329091e997846de4c8604dafacebdb60e9df3e21c92184cd5bb1ec1d94fba4eb5cd860a40ea2c2eaf9a839e867c00ae1cc1ed7dcada99bd5d93789bda42630330a0a49b583cbd9e6bf9cf7adb81d46ca96940ecd2260dcede5319dc3f56f6b3344c8b8a60264c178984d0c79db797d05f632e360cda0b7bb878d9d02430b316b30fa991c11e45992e1ad2392397f38231e4d3b8c35f7591251feacea7ab3ec32c8a70c3a54d953792946f4f1a2cd189ee1c50c8b2455e384f5cfc7e16bc2cdadece3eea6dcde74f19889e4c5f9e902390783308ce36c1fa47adee488cc415a0dc125049fc84774300602e87e706593b37f451744de7346b79603de2c2abe0e59d2bbec650e8f94c44b8c029b486b718d785061629697161835d9f30e26eddfa40851cd21ddd8962e5accf134488b1f79d2623a219cd5251030913aa276224455eb9bf88fce8736f0d237aa2dde221faacacf7a71924c917cb1cee6aab5638117345773fffca91c9c01e04b9ed9db456608e3fee67ed91fc203468ed553bb32c49ead59e16ce4c61c4827aa5f10b318b586b61feb6148950687674ede9f78f865f47e4d81be2fcf5a91c1e7b57685ec97367889995c04e7de7089b9bb105fa53be7e0f2c423b0de64a19c82c13133c15771b3a375a1f1347889225c7e91b2a36c9b5e4ff47ef25df56993f0c8d31904bb5b4c3e6605ec11f954d61b28eb594633d390e45626312345993babd070b070187de8aeddfe513024ac0ed1935d6cd92004a48592b1640dcfd17e969c09653d53c12202554281aa9a5cd9b946eafc52231bbf7d35b2892cfaee82aa22e8671dbe75808c1e576499adfb6bb8fc60a042513b5bb912f5003059dcfe7d939590c36bf672793d208076784deefde0997ec7a13f68870d8941c3196a3c2ff5965d9cffe2758a078a07baa053d2e54e8640e723c0cf6ca3b63a2e54a27f1507fce6428ff9034c6fe29b1db48c2f84dd25e2b9305fee1f8bfe1d7d802b7c0295358d649b6dca41c065db6c65f23f9fc5cc639ef2510ad0b559fb841625adb496fec9d5c1b9d6ea84ebd705aef3167e6a7cc04bcc636887933f59653a4048c8bdf503f827f9df1c08ab09b51814601d17933d5d5dfc6c8e8abb7243c8ae91d4cb7d5fa6e1a3343169fed00887aeb50662de6372fe78c6c2cb9cb7b2b19d44582aac7db0d26833045ba9f941c2fdcdafd4e1f6c0ef7962e59aaeeb28e439fd060067d22ebaa29cbe4fa90c196632d00ca4807535ccbcc3945616b7aed1443f23456cb9f89a037a8a24e3ae52f4aaa5dfa617dab2889adb05598ec1f88dd7feffc370a6eab1f33a41ffef48b7e0228929ca6a7dfd76e66d024918ff371f95d8cd5eb96a49c1135ed59e641ddf80c6ba52555067520349c7acada42bf7b48d9c9faf79e51825a971f631fdee010c7d748b540ef72
ec9400758a9b3ba32966fdbccf5f068578144ed02e3972737d5c4fa04d81c9a3fdf08eca43fb98a6e57b496224e4b5494cf119fe1307cf0ebccea0a10657d21cc4140f328af5b5bc6721b9e7ed97ce21fce4ab72cc7b7aeb00687b86c4383f2b183634c892a8dcc3edafea99e1f84520f3ed7273fe3ebde10d5f30b91636d41f2325f272e6fb3b6dceae4c96d1609bec8f7eebf18d6d8c0520baafd860d41bfdd145a80b03dad10a9407755c7c70d3498280826fa5cbd276ff3f88ffa5f64b3f8260dac4c370020a193dfa01cbf5b2ee8f3e95faa23ca8f77850d92b967dd800c892a9b6c5edf28374abdf5afe8b3ed663579d4ce37b55fcc4b8672ea25ef6108d8f41514ad6c2c43a48b927b883a508628cc8d773a44d06fbfc5f4b9f2d7aa98b6706479010115a86352c68f39658136adaa4aac091a25f06097535ceeb1ea46e6d9e1553b2b0106107a9fa10f2e52e286182b771487600aba899909fa99c8af8337d24a6c201de9f14f507562477c1aaa637f55343442e5523b1bc0315de2b00bab65b2fb562fa6be67c7d60fc31322966690902d40bc3e8a9feb2c4b7a7cbc41bdb08bb58d8a72ef4f0327328af29145475edc5936efd2a70d2629a9bece24128aedc1a848e60ca8f1fe906132926e9ac3cbfbf6431986d464eb449d3491faa5b77db6c0da7960bbc530d82521d61a1003b4bf4f0d311ce2f09c9bc7c3667501107f2d5df9adeca34986365bcbda12fb13361404f5230c662eb55de60537c66c95cfd68f397cef05979353725d216bac144602de1ee0956e1da41ce0a0b784807456f7513512da7dfd3602d6d4ca0ff057cf13522a2d0d838406f3af065d1c238d70d42acfbaf77a96568ff962f04af47039b5e29458f2adeb532892fc865b239707bf0ebea29a99e95d6a0c85f2b1c1ab8d962feb41fb91c9772d5430905562898f40ec89072f59281ec4a92e994b07f8ea7842633e91f9a8bc47ab243c75f492d088692675cad050ab45aefbce0dfb3acad1ad8758bc00b48362fea224c5cdc1f13bafac1f036a9ed1014e5cfd88e965d0538e812c840bb94b3e1ae4feea7490b3f16d5fb6729337b764a9fb5c8e7e270bdbeb8468a1e53343e69a0ec30020a7606d152496b7eb064b75b815ee2bafcc40f2a01fb102fc9751fa31f36aaa954781a4e206552b53"}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f000010a280)={0x0, 0x0, "04056b7c7f7e6dd29915008e2eff8c86e9b104ab19fb3f79bd6e4f4966a3faa7bb58e93ab6f73cbb45747b8c8e133233056d4311d934ae0331ed464c1bf6ccd88ba300c8c130f15276306a865ca613166d8d00a4c74d93c1b8f331185e71de1d82d9425699604c2fe0491d1f8e273ee7d874e704bdac0cc705eb11dca290abd9b861edc528dc2e1623c78a922612c5ba2c5d521f7c6beaa9a356cb1d7bcb183e4599f086e018f31cd40fe17dc80c764332d26d0d285c325d7fb960612378206e196be9b423d2911092e4c4a5182d7c5dc3eba356b02eab6b1bd74be9aa490f94e32c8201cd19806cc72d70761c070d7aae4a07d0b21323371bf0009450364308", 
"a0875a9ea5a6e7264327cc49d7b63a1a7072a38ad4004f73e69465c6f91bb88794e596ea441150c2c2a6870fa7bc015ac859978dabd9ea905b2d4697b3a85b649547ec0e2c0068f01c20e505bc2f5ab652f2ef3e75d77c24198cb9c337a4de622b7382440d570f822b7a4fbbd28dbc8f24ebf153fa1a3472b01b1583617ee033f6c36047a5b163561d6295426d8445f81cb16ad7bfdf9af5dec3b6672744bb9e3ea8839a44d2c11b744c151620362f1133e91ea9edc491568da205816bd6025bd7349d2602ae2bff0f5cf1851ba9e43d963b7c8e91c41546c2473cf2491fb5275203d032150aecd89945a7d2f9fab3801083f99e8b26126b8a5b1af2820b613c74306986b76fd354aa90acc5c23009586d25effb83d9eacdc151ebcd697f16a72f95095a75da4cc42bfb839175c65cab28b49698e7dc624bf3b51032a5ee3da63cceafc3e00fa677612934339507ece9114649496f1ce8c4e253fd8809c29bd736d060c1d88bd70a5218193face8a9897c26b3fc027ca758e9ce75f6762b77f7461c27ad986e96c100c34e4988cfc2f10d2a72637893f7af71132e6dcd10840eb94da3bd4eef49215185d6cdd5b54460668c3c45533703da79e52a3298fa7986ab93364e8a37e2b0e5b5e0ec5adfbd07198440652ceebf08a1a008ca3d7d96aa65726992b1594fa86dd40cf71ea771d4a4bded7ad66cd50954645ee61792eb3960297616729b07903496d15e2455786f64d163c70670d0347822b01bdf2d19be7c40b6008e679d8fe5eecd499155226c585e4074754c26356e750b1774a2495e784234035f154cd967fbc38ae0eff50e5850f2e7ded0103bc8976d3c3112898c706be4eef057422becaf9dd43e44dc0c49d95b34c227febe9eb8df135b65703e6e1211f70d533f6afe70de1cb377863f51dea9d28782fef1ee465b01f2e4345739f9399e64e08313d05719eb9d28c7d701a98c1894ca1463ad0b490e72b3176fd4e8d1499d62ba2e54c8a1fcc055899e719ce63168bfa992cbfb38426a3a12b3ca79cbe6178f44dd6a37e2754f8375b35bcf4a29724c4e2e6b233c343bda45b07d2858324c988749566a7b7e1c47a41e237fd364db03f1943239183c3427b150a302f766753170df9169a7ccc4c36aaffaf9ce47c4a4ae5fd722e01768467d746f2890567e6225c07a1547041128225ae172e0c0297280d63482ed4595422c6a3fc1d7a956b125bacb755d2f6c3a6bf7b2606767c8db25047dd77c075454eb0ca15234d10f0eeb2aba214954dab81199f204aaf9d3709e321f0fa3830bf01ec20e01a189e22dfa5f1b2379f009decd459e8242a8ba4da56667cddf23ae39fffda8616a64a46cd4667cc74af5f61c3c939587e4d3c645baa3fdcabf1728384cd0ba994a7f9fff80aa551a44c0122d2c1fde3ddd44790e3eb6f199a4b086d4912f01fe8c9819bc6da0b442f111ea803aab3cb96dab6a5072002f809d137896ce4fb25acd5d9bbdb3d74a1325731cf12fcad4e36823543539117f9a34e5495b6dd021648c7fa0144c1b34e41affbfda1b22325da367c2536c7a24d8fb5f97fd6183835889535b3102b56d05e0129a0eb1913ba288dd1927e69edd7c903c5d85f9e8376860f5cf656a0af24b200d5d76c1436be1c7aa1e36435a9fc639bef7231689db27080a0e2049062ee39a837201ff761300e87f5af369c3f7885fab03936b3eb182713300fd79911505a21e678b4549d703962c8ca22b731e5bdba65b665aaba6a77ff22605afb914dbe6728fae789dae636cd1029ef06ff1b01c37a44684651817db4224fd220f0e0a8542d84dab93d88e1721622d1f1babfd5063c402ef0e940d030b0d247ec6c9d07d5e68023540ec998b361c45507fd2825690ae2ea5fa449fca7fb39912a349620f8469bf0fc9a06c8dfecbef43b83e701955d9058a984a8a90efff499dc2e59f5718a63b1b850356bc64564dc634ee8642143a4c5e54933308fe0e8906770885485500c835f3e106b0249a7dc5422dd3638954dbeb316cfc37450c44f6be70e7cb4be61a7df8fcbce7317c8ff67d881c2830c59f6b3e38eaa93f3180ae343855b8cce66f8b1d9e18ea390e2501f668910cf6725397e69b0cd5a63b78cce59468b0b55f6cff4db050a05262c538cfc9b75ef163ad71879e50ed4ffffdfe0c926ba12515728b23e85bc0968fca638adced2e337bd74b53f443f3bb8d37949ce2f6f3477f0195859679e0c951c8c87b9089e8a596b5d2efd936182a359df0a699174268bddb4e843721e3448df150033ded129658fbef11c82d04a6bb388ff7fa308103d614b1c651b835fa3f714acd4283b3cfaf748bcee0d0b458220a24104814fa5e0669163338fb575d33479b85113c6a92b06e9857586d865139e698f92748d222636ffd1d8f1063fa9979bf1b976c466248489d0050850f42637f446483692ec33e7630a7e0c0c7c366171e8bb0c8921551df58c386fe0eba9e7930f442899f6a6c349702c8
62b6f3dc7fd26cacce27a7d875f6db74b85aae73accb66b2633bd1f26224848787cf484268c0f594f116a5b881b13da6598ec093095b798f7e0c96bec02022568b707f5eb9aab2f7be9d4eaddaad3b2c156b06cf0082dd5777fc78530f53815346071bc9260ce257b54ee7ae4240679b12640841b0015e0ec22feae3038ec79fe04e67d4db533bf44b934605c52245d455c3431fbe38149ec0e11cd2e4b477686b07d19e7289f7929ea4a88dab635ad3027a0af745ebe4e821c1d452c03839c3e56c52417c9ed769409dac28ee0847a71f006243373ba4baef85ec9e64cea1570dea668f2ab433ebc57d690f9dcbc3dbe8bfd64d6d8f2610f91d92dd87d339c93723a83863bb5ec8a513f345576504fe00a8c338d23e425377190dc1566ef3985cead73e69295482db9401705ea05c6f31c701c9f09df84a4dbdfc15449e505691929ce69500c30a9c757fe955a7bc2f2c31abf72b9e3f1ba773fcbbad285d8ccc68cecc07d04b94b5685f26c496f2d141950718c7329e4df9da645a3a6528c25ba8f4d2a53add4c3df5aac22e56830a929cdc9e68773631e89213489b4bb8bfb41e140be73ac99d866578d2a5d48ead81963d6e8f18220ed5066283dc2f9a7cb64608fc474928d2fab719cf41d10066c88c278bd0749908eb4374c0d9b45affdd1940d48e047e286ebf34612932ae710b650c70b43afc47e52483d226d003bf3245f21ee9f3a1637b8bce0176df2ba74f885d8fd15de2a69ac876634275f41462fa423cf7edd5ae99d961565f57be78a89e4e5682ceacbdb19fd278a005811d37f52e19e77b4dc1dd630fdc6d07254f644b31151689424daa06e1b380586f1fe51f0d2aca974a0d424e4c72b2899d16e77bdb7a976e82afd0865bc24a491e79a18ad76fd8bdc790649f93e50c770b82511c3cfd2d1eca5c58d3b36be351ad0e7a11fa95491b6ff4cf3640eef5d777cd2abbee158ec491409affb987252627d4b59c5f637a716fd85dbdb4a4c443cbade0bf88b9fda657ea02e59b1c5e0871e1b52f6003df0b69e49b314939e31f89afc32039fe0da2a23e098b911915a09b1979dc047efaabcd9aec8eb22f3680a3063a3d3c380acedbc2244fdd8f39afa21113a1c9da2ea8c6224447f0646c8a1c1414c53d7a789ca8c5348678c52400aecc620f2e780ab6ba723b1bae704364e069dc0e439ba80e8c47df6f4097afa7d05c720d8395d17451e7609e4c391984576fc7a957e0680debc2275010a2e0a521f901beba45a3ecf15d43d1f7b963a7b95041a353c70cd107154e60b4d7845ccd9c15ee02d1fdf12966d2d7bc0912a080d9402609253489361668cbf2ed1753634436f987f6e5d95947a9f9257d1d17f86969bcb4269619be68b3c218ee09ed062756d4922df3dbf394e0ada7c973e113037d92cec97a153d5176af7ae4849cdf3dced82c2039d64029a4815d36eda44e1cb0b74b93971dbaa2a14e128e5fd1fc5a4d423a848132a1d57a3e1f1e613a166b9e3b2c9a4ec7b1b266b808ca19c68afa2d5499cc7ef6c4178468e7a1f8d372ba326f7fe3b207fe9f97beedd541472313afce2c2d8e497207d2ae71b776db4e20e5d09d54f2b31aecafd7028eae0d99ba690ae54fcd3ebc849b89a76ff99f1eca984f153c1350a9a2fc164997bee4c4f0466fbc3b435a93e5785e3d22dfaaab32e84604195b3f3fd36db38b3d6282bf66f94e51d4bd7f977965b43da477b46a4bc011731f6468892acd04bc70c73c919af3412b8de35eaa220847a82466156f584392d1c04bbe384b6893188b1b20f08e15772818e4e90a0c88b69e70bbfc6a09f9bdeebae5ca17da295d08825d147803b80f1ac60406f5186055f2d6d22a7edc1ad5fb372ad91da3bb155a7e27d21bcb9bff3e0ba6b7ab4de3aff1ebeaa2764d1a6753a76e2135cda21581fd4293eceb9736ed7a8e80a2ea2e49ee2455e54e5d51eb0636fc690c9757b03dab6719ed61c2afaacbfdb9d65e34a03c6e19cb21ad7fa7845deb7603f00b144fcc70a86b81d9c7c720f3cdc3e4beab868dfb9f4da0fe890a6549d0a17254b8f5edc9b0071e406bdcd58dad279f71b9eda4c2baf1870f512ebae1a841beca75571e4a98ae6b097a56170b56c5ccfbc5be1f17808cd23d4609385feb73f883bc1f2c8033b184464664ceb274240a23259d2a2b9bfa30b41e9d5ca11b0073d87aa4b5c31f41a00281b3d3943f11c1d44fdb269a567ee2c06d36b1e0c36fa9da7844fcc4d952d4b0ad02ef131e6efe5063dfce5f3bd5985d69c8b921e40922fc5e947688c9e0452647888d7daf4bda4fff4c6ea291af9da3e2ca84974922433940d84df552409ba08b7088699d3f87d577cf9fe57d848c9def4af992ae6b3d163c6b5635a3c54f0219740354b08d8858f3b6479fe2639ef059aa7927f16d5aa3ac80c8d41fb3fc6f2bb799afbcd77421c6706bc817d1edeeddc0cf89a89ad67674489bb25b745b91f5a0fcf89d2ddd4771e551bd686347ba1
eb76a67b5f9c2e2455c7d03dc71d242fa814c92f2d0eb56c1defed72c6699cb4b5fbf0ef14791beaf62ffacea02f37e05f25adc38946528151e1b692d6a02b176a0183b1d69dfc590bfa2622bf07123ebfa1a0f8ae677f0829caa1133319667826d4ead4c6ad69169eefaee50c9ea108682ada32cf191d89ace5073f2d3da0214026d57958974375882aa831a9568adbda9ce8b8d6183992c17aa78742f5a709cc29b618741fa9d2ff5f67101491f8bdbfbf0b13efb4fed082855f9acf0e0b756c0e2258a7d2f9ddaf0f9db8bcefad4ddd339f9c9dc9ca392d0d6ad227c04e87562b302bc532f245f71d785b679939c99a8bd886006ce77b2e26167d408f30316538c72fc8d39f1963d56f929e9"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r1, 0xd000943d, &(0x7f000010b280)={0xfffffffffffffffa, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x3, "bd8d7c6d3e5937"}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r1, 0xd000943e, &(0x7f000010c280)={0x0, 0x0, "c847166acb619ee461109d547981cdfb0068ea0f0b6fd669d0546631ff205fec847f9b0dd5594c154a5eed85a1a4db2ac964343dce8936518d11bafd54c030beec2f43b8067b1b32fbc906f60106cae0dc16edb8b98fb80fa896992035c5226a5814efe3b6d14b78f6f1e3aaac0e8815cddfeeca864a1022b1a4a743f126e6968ee3b3d7bf3a978f667f01fabb59def7a90807be224ae672118ddac64e5a2c4e250df78297f4713f9ae193555875e0dd0d85a2a56f47fca4c34bb14c8ab2745dc817935d10644d78a7713b84cb854f4db344ffe684a26b1933bf5711770c2f7322d99948757c926cbd30e1b44cee6305d4a1fb12f8e405d15e4297c452aca924", 
"e45376549955f79ef779bae8f42f3464345c5497f1a7d72804e21a9ce0fcb998babbf55421d2b3e79f5ef64167ca03c3c6886e4e0e28573dcf43e2fa50ff681f2b588040a559c0e5b26f82ed3a0e5f0968741a2a8ace819dd2fa332202915195104b4f2bb7fd3ca6ea77578cc28c9ced5156446ec85174c195a99d84077b1c4d43865ac16c0598f176fd2ca85e70db6f4c652816ee8d19d988d2337177cbf43848192a3bd86f469df76db23ee053c8a72624b36e594c963b3131c9d0a38a4dc961be6bfb3f49c33974246bbe5f5698122926e97c20010eb0527057e37e2d802172e9d031f11f62d6bf454a01e5821e260a0beb924bda9fbfccf36a8273a4bc79830c11397ef66b7bb3b978b8136c2bc526b15cef6a8716005cd1064d45948b106cab4585cb6a0645d6ba859f123fd59aa502998731267b646230b183e12091d738b9a36705d8fa44ed6a1567963227c258c4bf9580d3a5fc6cb5581eb68124c9d5c913ff8b9a6cd51754c82e236a7c60a18021b8fb0c646da50ad744104128a9c3aaa259b9263ffa623985c84e6caf591cccbdac4e7dbf9546372deb948eb49475c8815b12b89410db8f9c134d8eb9acf2d8d139e36e0866d2bd8f1c96f543ac5df7702b644b3041e740badf9d82791750abce8dace3f1062a9a0a014c43096fd163bd78e2a003096cac4e4f8c87353e43bccf43437ef928251a719827f279a0bdfe7c2a2d2d6ef41841d44790f94761e8cb09b6fdc9a8b705652ed6812aff88d68afed496dcc29acce1215400899472950f165eb88bc118bce3e9f9c2ecd91e0f2295064562082fb15193b658821ba3d01cbe75c64870bf9799143a4f93790c15cf1859ea78729c76269e4d4debe659262a6104a663c370b831f1a82bdc02371c97d4438baff303cc9c4c3e0f1404ee3231f9b18210640f3d4434d5c7b78bdbb29c0c62060f627105c82467f9f3981c0990f20b2a9355670b0fb19ea59e24caf8410389372015f3de2a4a51a832e39402a266387dd90cc5eb85c5c15ab9cfbc9dab6d3b9245d567045743697228b255d4fa7aac06fc798bdba01dc61b81bf7d586df7a8b28a1cf196d75852ecf0999fc5706dd677c4689f26e21277d1ddc3a81f53c648ffc0031cdc70248e2c193c29afbf071fd6a58d150a23c49f8281ab065b6640087d1a61815d07e5ce7be012e329fbb4e1e1e422d5231bd4065b14fb9fb0e1563c6b31c323790f5227a67a589fbd72be83e00178ce6e7c73e3e1d9fc53056baf2d6fada8992a3dd003e80177a83ab61f066a061456337d202d5ea0eb3da8d598ff897d54517953accb08ee7ae3a071328d36ab0bce275de7dbe91ebd6de61c8f7ea696696cbbd33ed50822ffc64b19faff969b8ebafeb56e5b93d1dca63fa5fc1f5fbcd36eff714d132d0a13ec885a57bbb851da1c9f3142d8f727bc271e56558d649a0c551a77b36b55d52eb1552aa7db17bbf0eeae26f3ef472f9cf797359cd221a80550e0e0c1b0fa4af51354eee1732cf1c72db7e77270d135890e8d6c38e9bfd9aef9c259725127b7bfeec22ba984877743869ccf9f0cc1f580d0e2a626e10e4fd83a17f8626400a276e46ab997540613e9c889a7e5d351a6032f2abc47545fa39b44af8fd1746d764b91621cc967b831c5657d133792f0c11f86cad0bc64697236c5a7c63478af7a44bb5a664a5f47f93deceb8e8dfc5e82fa8fb3b8cb1c05a84882f5c5aa35c6cf78e397472136e26907229dac7b69b10b591b5d1f5d85af8a9f5a574af37e6f97c00fd739567e2fc3cb10b2344614cbb9e6c68895bc5dcb1bc3dfe5c3993ccc9043c5b710618021ba011764cc16590a799f26f359b3dffc2caece18f9779c222ac49e99fd6d19ab2d6df1d5a5a11c69ca0792bec22a73b80259cc84fa50e598838b5e0807193ee63965899ae0208dd3e634185cbad45c0ea368d95ef4218cfc5a9545404f7c9ea576effe1d29703442c34b953830029715092fc639ad3512ed8ed0b2ee31dc1dfaecb9a668742f53f5187ca7f7a32547811ab3236c62e2b27303a549827eb19d488b485ae07bcd908cab1af49db0a3b6910b1db6fb3db53476b722b802713bab1d7e76b6ff69a98c0fc2227d4e467591e38b03315f6eb0646b28b394d17c26c78cc3e0d2d87778521f5bd568e57038dffc841545645fff4f03459d8638bb22f209d7eaea3dcd9f835a89fd2ae2468f08168a26bdeba2a11f7b093feb40ed584a7b8ae7ac8487540a6dfed00343951ccdd67e9b6d6222b9cf277db68fed167ee2c4921ff8fdc6d2480c55aaa9b45e957ed8f28e80014d4c756201988a420496eb17ff9117ff51e369bb99522c7bdd229d46abc6cfdeded96a7f37d6a08419a32f89dd37e6adac1e0747779c5d1f261b9bf7a8e67bffdac5560ab5073fd3a127acbfc5a14ff20c6fa3a7f3d388f0a860bfed21c65f5243e0cd32028f84ae3505a9c97ad3a75de11107da7ae64d7449c59b4eb9c22646d0dd17025b538599
18ae2dc2a9f080459701a1258baf7d648ee08f3ac38888990df027cd64172fce30cf80d9397001d4f15b5ed2fed5503f03d7289b5159e0afe8036553d26d9472047d71711eacc46a670c532ad5259d0a9d7307d41ae53ef77806c3a22d0ea73d99144c90a18f4ea1a93684890e433f24aa7449fe887204bc97029ed8501b62b916deb638da0af34c18a756ee02288be3f6e39218c6ceaa1077ee05937f37eb29e8f6cdcbb49c9b7480fb433486ca9562ae8c5c32e2a9bbe1fe92b68ad2d5a565386401d55c32805c1f68983d3ecd64f9a7fd3650289eff4314986cfe3c3914d86d470635dd1fe389c27823917d98c8b5b1cd1ab3f9400d5493f3ff95aa2cd57e5066bf87075f2b9a7623d3307f034b28bf60304876cd5de2274974a4cbd431fc152c291a821d6803679d7cd3e758b5e071e6608bf6f06c3f8925d215f5b7d7c17aaef5a612085849597512277650bbdbe79660fc744f0c331bcb777bdd69f719316500cab4ecbeb66fcb251f7795f318767911d1a6e7f3ada0f93ac927947cfa479d7d20e972b0d15b1a5d28d67afea9491ef5415f98f696ee3723602eed68793b405f1dff73523165825b0464837b8146b5cc4b6621ffbd2d5023c1eb16b8abe09151ee51daa782b3469f0c357b8a04fab73df464dc34cea895cb9ad5c9106890931435e2409e51277d54055069b4f1fa0b8db1fbacfec2622cf85a681d1de159e056f7eae0eeba739a3b7732084cfb99ec5d686d835f0560cd2d8838bed6ec4f6c1ff5e9c5c06a52caf1687753c5da5cd4194ae1e26fabe435cf7f626c07eae51bb0db5f9d4d4e5191004cfc422b208716fdaf69384f282770786d7bfbead48915580aedf495a434bcf1022b5bcd86fac09fc3523c86cba697988936a76423e7f7524b41a7bbe409fecb389f77d47fd26f38cf7ac92298d5c06e30956aed0226bb8f13ee23025da24161f886cbf806ec431479023e6acb37282589120bc1159593466edab5c2a8f4b2d931042a3b621f2a90d96155963e85521fef279835a1e2b0ef18ff57294c75ff84cec261ac064c23acda048d51163f2d10d33d3b929de8c7d6414e97217fbea6617f25fd80cb6eb51239a0a6c1ec4c2a250d4cd76d85609bc2ba9beee07e6ef80a6c569cb87ee986bc4794e909d403bfc82ea18c2b3221620af66ac56b312407c64d182b22b8b8ffcea91c6df2e1425027ab79d1c07c87479d050e94d3c54cfba1f770c723a934b95dcfd84bd7bceda961bcd75d58e3b223db22e0ba40ed68236a80e2ec87d6094882493bade7450adb14c0ef76394f2ec8164bd7d2137b0ee7e8e4805046d49987b80463fb571d281abd3f627de5296bed0dc966c0f5da965d489c1f0624378ea764ed56cd6363a12f7c00c6812582009c4a794c1127f159e37af66a26417fbfd8f14eb0f5ed1783995fb2bb2bdae846ac40ac4e56beb6989b317557b9bcd953b5123c624d544fcfe1674c95dda8605dc0a4641347297433eb365b89d7db673c9e902696cfbc88143992e03566054812febb9f87323a315157c41997557d263daf1406a36988ce5e1c622e6b350d7c990ff7b8890a27657c7dc859b3c5cd5a69e4b00b896d9a33927983d9b547b3731554ae479598b52a87df11d9109d38b4b8cb6ebc045066242238a3cdedead76df1b924e2e689fc38d70b2810031b96c1652c777f8326fb3bec865564df6a25cfe9f192a5f8f89f8bda191de99a3003e0ea0bb06006ed8c9c712fe77f4483290b2ee1f2a59511b26c626c681b789a01f7f2396bc4436eaafcb2c7130f615729673a4fecdf72b3529059e592e57eb431c72c5b2543eab821f6dfbd24448018ce526c9924edf225f54901a9126c77d140c88aed9a05b9aa86aec753db5ef3be78be172bb68f7ebf1c328798075541e26c526cbed9bf2cd848795e622b02542af266ed13f0639dc4e84674c09b872beb017b31ac48975e4af7748fa18a1531214a2df62b9e4bf969ec082c9de5da756dc59df9d29c42309f6db8e9500db2653ccf5917c2c2d8826f828a2ce986bbf80fa38f6dbf63404fd96a3e89feb7a1e2c3981d090231851457ec47368e7dc52b03145a8947c16af1d0b58c343bd6f446f213b604c52d911b46daf4bfdcafca6bcc5a3354b12a2d537c57bc48762d5791b2e930bf0984439757eab8e49aab9460a72cc2eef25350dacad07baa1724f46364de9cb53a248c03e093b854275a434aa4505dfad1877aba558896787eaad9ed9c544165d567e617e19ce09241494684343d3897097a67846bd7a0cbe74bc3801ccbb3feac6a2716eb8ee11490d782bb1103451b005053b0f214b828db0d3165d44c5e3cd6105cdf87bf2a5fc6fc14ec0d8886cc8651351bb6f20042be7d1b407bb6d89bb57123756bd0bf10ad69a508e3620d082cc865420a3de1a78e17f1f1058b3c9f6478ec4b773fe6bb51586a67af1e0102b90ebbf3ae8a7fc1ba67330dde7729af378f5602a6fa3c6c4514e7927ea7cb817
28231e2826bfd2d63aa9db542394bec789311b650b52ec87087307328734e5c3789086a9392d73e39fe8bff857616b480984c487955aa95c700bf246744c8a7669a2a798f7c05e73ba69ec774b07e3fec761d8c6dc2eb03e0992273ef94477ad19aaa54c1de4ea7a650aa9dad9c788784e9ed1ad7fc30855dccc978b1e4f70e24ef55e038e16e9f39d4589b8a6470df3fed4378a63b15623b488402a69cd64cd6dcca076636867e3b24f0d265cba0b28215cff437e97a984913bffdadcd239675749efcbee42f223bb0becffb494f21f98f662a2a50b2a1421543ddf281bde8e8fcd73742ebcf884666898bb1dd142da1040824dfb7b61ca694dda1d37c70fc4b7abff5705fe38d7af8792361d8"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f000010d280)={0x0, 0x0, "8e3e2c7f126af40c467d518ebc9d5f57dac865bbff298f13e0005d75eb891dfe068ae66cbc47b2371d488ed5f8b67154843ce19606116a22a705306a1aca9ae3290a6231382bd55c1f5450ff1431013f79c2e8533ab151ca6f8a6bdbd4c3b7eb53a648aa52302e564e993133b1f33573f03595fa7d1ffdebe85e4167baa799013db4940c14352157e701acb3bcf3aba346c9c8c781a924109d63a92dea9a3bccd7b561e30d93dcc961cccf009c8b898ba0f9bffabe6cd0d341654f724c40583ee3788dae747b172951bd94f844dc5ff66d4b95855a52961438a99a7ed61fbf16e0f9759ac2d55857ab18256d6ff02ac62f6f91f71f015c856fe1ec56aa8d0ede", "fda610a93894710b843c035a2a08498f84ea7919120e49365f49b04cead69b82ccee0c08063d8f38edccee30173b72d0fa534ec65a039fbb1006da38dd51b29f8b33bb54a46e947dc37bd11a4f11f17bea61ac5349ab7c8bd9077fc59316b7fce7df3937c8e0230b03271d3d0d96e86e486a7697731ff600cc5e2a83b178d7713b994593ae6a274bb88446145bec40952d2e8725b59e62d2465e11b58af1ed6f19721f56c08825e7931b047237fd5d122ead8545c1c06ff0bf557cc46af94033945259f6344321a43cb5c4e6945445b3d6337722b4009e9371c34c42967deb86b9752cb862b4eb2bb04ecb57a8907843895ac083267b418d9a0b6baf0ff551ab41b5eefca6d590da7cbcd5b85f90e7f73b4183032af67d59075b734645f9918ff50e762bf6273bea3b686e8f04a1f34217aa8fe0847639dab2acc15e90fbc8edc87476b09e1a91e30615a73f45743766bd2fe013264b79e436a666f2a98d3e718d5456b89ac6fc0b487767d69f5c7b73fb106e4d17c83e99f78d441b44977c542b8f1caeb4bbadcbf882cf7e07ed5a6e774d04673bb39a9165831e293e664f72cd5904e44409ee61e77860502e4dda41a9cac3e9c64bf300bb57d3efc5de4507655edbdc935bf2a3a1b38bf0b4a08f3ab0b35045df9384885ee44ec62348e951c70b55ce0ab6ac5150646eef885f45b308629a3d55c65e19154a8a982e5df25c511d3789e4f24b42292695f74f09b54b69ae85c23d0b250326bc14149bd81e966bb759ac55e88b769ee3ba69b4908e16ea48a58a639fc1b9c15a278470fd178e05ec02e78b9c9ca749a3d7888dff8cbf0ccad1fc1f8006e78e93248685fa9a350cced819a79756fe203a0191faf21cc35b066e68c6e3fc5c55f8c351ac3bdf48620819748ba7002b65e5be5c57b76876a7795676b15376bed1d2bbacd0ba8de6a233a1a8d5cf315e0d369e6d3b30e139d37aad4d8fc095c769663df367e649c120e3018ccaea57f728d995e185ad4d8d72dbf54efb895bbe01cc363d688c12cbd3f547ffaf06de3c0da5f9aab364dd17804f310b6c6c89f3af611af9e076ce5e2527b95bc67aadf2351abdc2958b70200ad662f84a54e30a97a6ea00f18f3c499ec3a4ba055eb061b8eba3a969e18cf4cae2d90eadf37bfc39ea2651e7a017488c1600073f4fcb72088db1cf15e941a0e8c56f2963cc2937b3b552e77edfabce75fbc677d1b05a82a4634c83176df1d6768fd1021025f05e599480468b8b8212d1d131659001cd37c4c388c7f2a07ef6f2f2e89eeac581b3d827434e571fa431ea1e2af49143377dd37912012108204b158346c7f34fb36705742847fb79a16e8d2a3247138cccdd3fd03cdd50e35945d28e31195509834e856df5d9780102781d8e8aa90aa8555340d04b3e2bc2fb8a18d75c82b34a9843bf2cce381002d6ab70843375155ace706b0fb24846bab77829e9117fa6ba7b6949080a82a1da93d57ee6b2f6543cf785be23969fa180caedfdef08844d89e70f4606b2c7f2523354001fb0e3c45944cb61a8d9aeb02c531fab275456cd9e68e1bc9225e02d074592be204f541bae0e535d48aca2f1332a147dfbbd0482f6b3813fb977e85b35035d8dd03c26ea9c9b8f4214fe34d71aa26c5a765b950452bb8f92eb48f8c50a2e83e3f5da743a876ccb57b0afd14ee4dfb1fbbb1a4a74972ac511
f7ff97fbbc3d2d19ed9dee5e4a4dd4a10605233a173dfc75fc92f92b0fe821d50222e9182f2cbc4eeb02bdc05535e83344c94c91e078a9f273053cfc81662726fc7d1cf8b8a89ebdba803251e4a8a5795d9a0969f13baa251dd2c336aa2237d7a8031a2f907cd85c2b75dc6afdb2a342dee98f30ee6806b17435ae80affaf851f460ff2bb34b9ee244eeb88a2cc5f5b440b1652ccf69b4a3312a1d504d4cf42d7ba1ba247e8b794665821ad17b909dfc7954ba6c9d285069211899a8ffb9a6f948489d322e7dc8d822b5c26333586c6de262e0c9e82acd3f67deea196698d4796865e5d4ee622f835b7e9be5d0c4c41c44bb5de107aa1749521e3d4064be2080aa972eb0f9b354acc133ebfbcf1acf285d7b1604ed92b4bdebefad134fa9a668224d90afbc2a0ecea66123fbf947ade45dca148997251fae3789ed8242090d107792216b351163c71d68967f33ff39a038fd9f241803bd2103827481054cad19a2a495830a60c9d45865e55002876108148654fd89d64e668d45cae5ed4c371037b86291326785943b1d3f56d5033bc89d328bc2273d64753fc6a102236f85218278c168037bd939d7a73f771fbeb804e0c4e875dcf7b56258e528d44ce5daa606fcf5940073a7361c176cd69f8477c990e7e91dfd1b7ab7e0a6034c4a429f731faa5433fd3c2bedf04aa2435e32fa7c53171512280e0e56d8b14c394eb280564124e865279a693799a73aeb4fb81c692d4c8961db0e305069c5b6e2af505988a045d64c084a2085f911da49aedeebbb665724aa51298485293c6675b788da992c56901fb81b081488abc475ef0e60bdc049e1064be70d1fe267dd2eaaf91031a0d0f85b62451caabef2e1bfced9edb41fc5deadc007d5c6675290f538245d642afd9f8329452439694a06201ad6807cda8449430adc59f99a781ac1bdfd2a3746b92b436ac5fb1ef6721886aac8e9e3bb8ce1b3a3aea1f70b9bdee99a57966108faf378f2b453e0d8344f2686f028b890962ce4fc06241b6bca981cd097d66c298f139d699bfdbfa698302f1240f6df2484d6249cb1df5c170d9bf090535f751e0e3c949ea15574d8f282c8caecaefaabfb91868e77048a50334f39a4f6b264b1cb7fb7dfa3e752052c29e93a1bbd3cea810fbe76d6898763e32b40e2835854214b2c130d81ade004c52137397ba41e548a3898cacf655b33c6eb2934108f0a55eefadbbaa093ce52ce62ac4879216dacb7d4e5ee1702079a6c06306875411001b0759bd7cfd57c278b4d893de608bb2f3f8736a948f35bc357cd482927e6f80ffcef7f556da4766231f84ec0f69e6235e5dd434628e7fea06bf31dd258e28c875110ec136d82fa5cf1ebe071fc5faae75df77c4e401be026a3193d72be005f70f2275e4b829b6039ab216e200c60739fb5513adacd113d86489b549f97617958e5ecd99f76eab7045785baaf48583f3a0684198b5304ad5901c7ceada492b4f6a19091f935ae21a2bd09eebd1cbf434c9be97cd25a5bc7f80978742bd98edce63eb40cfba2c379d206278a73dcffac6c375d54211cd7859151fe6fe9bad1e6423df05f323307fe026db13be15079bd57de50d5e9eaf0fd9408b011fda201c2b38304dc300e9cfc1975b6b50f5431a867265bc7c55c55ffb88d000411177f3511bf5e4051b25d7c0687cdbd578ae5a641f0023b66628c86de3f60a247d1283f322e40408963d3a5f1812ac2f6c00c025266fed1f4e934ea4cede62305ce6089a886e021c1737a852b2cc33413f33adbe3ba36313aa57c36d8536cde6b4a43a62dcaf8bcffd5eb07af9ece451c897a1d0539989087416df652e241201145a9613a2e7f76cede34dce2a9274dbfd55d224a23858c971d5019eb3bdada859488e9d25f385218d019016029388ab421b9b261bc6351bedbffdb876df6b8ded2521dbba8cbfbf67690b0771419c95b98b57de1d3c37ed736be573251a6563194b52a88d7952f79d03df62abef1eb2cbbdeec44c2427bb3b995ffe045b5993868a952f393396848723c5ac4dac43980b635d4c04a582810b70d9043f2b101cde6943b953ccbbddcf931df5441eca91c693fd4f607142e578c6b6e12f7b2933838d00eb2c702d5a0ca655b759a76230a0e8a90c930bc8a6ab5ce3dddf975ed5cfaeb4ad7d3398b9ede1cde68d0a12dd50aac798b0b0e3fe5e6d48c73a3bba09eff15026e1626acfe0a3bcae73dc3b21c71ef90be32aaceb7e9bf5a7ffce69a0cbf35cd157281d10f337e89c40da9fae6e607279e249a1320290e9f49fb7dd822169b11984e9602c7a99203320c27a60036128d243fc1574ffa24eab13ee3c76dee4dac1e7265bdb4855860143b406472f1d701c671546889d49836ffa2321dc5bb97c52af9cf3d1d7633670508d028763815158a51e75a8520f7ceabc34aaef3519c7811b0a98a5b17b510fdf42e935146a3ec19e511b5a8ccf7c5e70552e94b1d2cf257fbe4443df38984e3c9acc2e76ae037340
d8ec731f6c91ee9759f883a89439c5027700e3cc832d5c5aab6cc7741a12d3ada0f9fed2b65fa1dc873e9fefd49dfa7cc4b97a3b98cc60d9d66b261ab0167696ce71b9532f588b7cd61c8817dfa98179e536c41316feb864330fa8e479b19874ba4bc768b6957dd31eb15442a8c35f297e19ae90b30bd3e17419a629d33a14b31de1e44359fe4e5742c39ebaa974dbba3b68f3683d60693d98ca29dca1198cfa3fefddb049f439bee15c6f8fbdaaa61ffc46c38a3d6f960382de63e7e6ef9828d4a8d181c52bbd4fcc0aefbc855d0be554dddd116d06fe2c58da546a9ce7e4cd52f81c1196ee762a44b5f2707712cc3af148a86e5c32184d9f07fa42a0a70b59a7697fdd2f369ca9963e42a91bdd4cb6d76d73a6e508ef8aeaf1f3f6f5e04e6a85080c54a5875c876ab83b30c7d36690698eb991f0ab93c0a718f523dd240e7bbfba2240e279fbdc0309feb31b9b42f6c4288e5fa8e6a222ef16bd60b0dc4c8d329be220f6f4ce4fb308cf5799173edc1d6429b9369d1277f5fc01ff1a3184000d6c781be98a461f9b8df9977a0a1b771dddd2880ff8622a92649aadb3b567f65716d79f5de56027a749962d398260a51740e30598e645145e9fcac002379cefb13e49546431ff5dd1050fcda90064718901237aa1f84c7fca3c1e50e557a107fad9cd8f50082aec58dfb949355ecf0b192e307197a8323a209be51fe08379e18179e5f1f1cc2c4b6f6b63b3cef346985d7940e313af9729ac1d91c3fa64726b3c8fbf7313cfc52435ca371dbe76fb59033ab977b5a566af609fdca57f9961b7bbc40b891e5f2be8ef3bcad7e302a3bcbff3896ad930a696a821809cc69edc1bd33747faf1977e0c1b72bf51aa75eeb38e2658754e534f79bc8a7ce96a6cc4efd61c46e008e879a9b685ebe68d14ae58b9879800e84a641c8ac1a8a9bdf090e9e8c72101d08b6548371a8c7a57352f5d3d2b29fe9e16499327402e0fdec2071ae0eeb999ab06c953c5a2d2639351510b095fcf0c6048137377872ed307bdb382636d547d68f05f8f4358e6c7c5b101dc60fe320aefb945c3b672ac6a52b2490efae6de8a65d1672396902b92ba86a3763844f4235197cfaf29d3535a93671c2b413f0b060ec0ff53249bda17210649d73919912c8a11ec2dfe8a34c3f593345e411f4c50663"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f000010e280)={0x0}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r1, 0xd000943e, &(0x7f000010e480)={0x0, 0x0, "0246b0c8dceef1c393bcb83a4e14dd617f2b7dc0b1cdeb830b690667e3512e43fd2e77d83d0555df9e7cd1eac3dd2092008c2e912ce0ea529d9579a138101a1748d6048f247cfad501481d6e57ecddbfe87bae30253108aa44dfcdef3f8e74ed2842fd9934fb91b14b93200ef227a4ffcc2a8e9da6d3294cbf47c5b3dfd145803bf0084c6ec343f804d1602de6adb29048030c38a226d45c046134f48b148dcc3de5d05173187e0aec7c0396c08804716e840612225541567ea9ce2914de3d2af6a55708cd9c14cd160768441277414cc247d4bbdc9d519b0314a7638fff57bba8769d5ce3840f3b5040fc6032d6f09e4255eee022e8380bc48416c369dde48f", 
"86157c71a2653429370e42c8958b242cef06e1d09a445724a7289f3b1625e0b253ee1f66d0d3dafeef978c77cff305cc17eb40c6a0df1ff92ecf2d9bbc4fef8ad7b1aa34692e229360c3cb199819c7701f0d2cad185db7c27d84ceeccc58253de5a23159d1c5f538c3cf4a99ed3d0538ca0d7892813d64d8cd64f98748899d2d6166cd9ae01b20587523ce10119dd5e6532eba1de38a26b0ace843cf7006e5e38d80df14b66a2a999496a1c625447c6cba48a9a6d8c13d779048b30d80f38c9fe26a53c01160f0655890976482f7c957d5f7856357bd05c7956477093ca1caba4d39b1da80e4fdb918cc06d2147268176abdae3e551880e1d5d67a92f60e6e751fd4f7ed8bf6f302c0c98ee9fc9db419a12583fa3c11dbb49340d91ad5d91a4e96488d8ef6712743fe251513a00ec185468ac59a8abb95fde694aa25b2a7a3bccbb02383b5980ec9802c6261f138deb810954e87869941c7594e693b1cdfc9904d0efff040d415f21091622f6cf8d5e5efb28eb5653bcc5d1b14fe8e961bb5dd75cf635830fd0e4e9caa858b87d5b72f59425120377594c291863de558e8a2650ad029901e2ba7950799c603b8ecb51508be88895895371ad9220a8fee684216a1b5da5bf26ce24c69b8edb3d9ab812a45b5939f805926a34429d6179f72ff15c5983a7d67291d32ae431607513fae025c740e0edee219f1442d5ad20ffa43b98b823bb82bd11c17a15ddbec007a14809774154e6c34c41c169f07b8e0a2f5bde73e3a511043791fc60b8126dd1dadd62b339929910a198ad3fd7dd1bb17ac1d68ca9901ebcab7f62fbd844967089584deb4e82e483aafd2325e11f16f54e565ce8d06f0edbbc378d59b16fec7400bb9962ede9a3498f8bd1aafcd79554caa29d40f195e88403abcf5a78cce819be8ccc8b595043750f63c1eca4542db765f8ee00477ba4e5a08f5855c9fd6ff2059c172367d41eed5ed241ad686925bde2a9ab83a185a4f4ecefaea23bbf8ccf3a00c241a485b985645afb0623f353c77d8e391135074bba0b29cc61339e8b57558534480b51d088ed9ca1bf213870d37223b833c89c478087cae87e6f37f178c66fd023428147a6849ac5f556b5bdf8d19f7ebeb3526ae5c991cadad0389b0a9f9cab46ca1920e160b0f843d14bec7a7d7fb0fdc0d1a0d8fc43156d9d09243232e16db1609ef196bac8ccdaa4f7a57d57db458d6a273253b3c6f9cc0480d8f236e878a51874201c96f57b8afb6ecd8e18fa10124bdd40c5cdbf7e330c26e8716d1f0b39aa9a4191e3e21509fcd3f752557b37cec394b9c46197769802b377ab92f70242e8cfe548f216144ee98bbb5c8b0a6cf7126be1504c8feb67c06f28a31c4194d47f07d961bacea0fe8b1521481d115dc87727959a7a4e25936eec60af4a63226e23388e95062432204f992d057da63246d10d449af23e163e1f5cc73c24d2bf90cc6e25aaf2355d09275e1e759a207b3dd901a815cb4b2644c09440661b75fad7f8bb680d5ffde4683c5a3fcb0bebf63eeea09bb2c49ba539a4566fd75aa34701debecfc2780095eb1acdf8c86ddb99917cf35c135824eeaa89750f56c5615296e240ea8e958530ea37928b084e155c5c4a298867be935623ad5930986b7bad59a4d4fdf25e6aef561017e85343439bb4d9b946fd0156eded4e834470ba93668651b81854b184cd57b729b8b601efdfd2a1e79b91708f7fd5bed5ffbc18246d5ae0b5b560ec1e3ad84f65b3e355b982866a578bdd1d7a1919cb633edfc4e9acb718ba9c94fe4c3ad9f78ca3d6a8386379d9c82e2f146e89fc2dbf9b22a184d99a1affea936d43eb8e97451aa9d5d06661166153129aa0d788c8a42d58866c621fb392870c70b6514a38245bcdf444873fdf347563271c6f27f6b7a195503033f190cf29795f664ac6721529e20b02d71fae263f578d508ef5231c46bf284e0eec12c499e0ae2e1a4550476de0f6341015f95d36a41060501122d29d1a94e95c26757b1af928b6d7377f89d3ae182c697e2b45bdea3573d88c5f345a2b44a104fc53cb5ff1b5de2192e4e400ac5c0020c0ad940bdc2bbb50c1d8901bff9e7a94e856d2043286ec0a7b347c5418a63836a928317b73205a51370f27d7210250f32d20abf97fcf227405fdb2c5945b0cb8605bd28f0e3c64d44d9d6f3836cdaa7b5c49cf01203538bd6387d91c162d0e8bfc060c40272348abbbedcff836eb7aa4d568ffd31dd13baf62ade9ec4a69d424e937592d015aa659c036a8d3962126b2ba685dc97b97593caec113cd1f4b9277050ed55a2bb765946a2349515eb4caa3938dc3d1318cda69b6533191e0efa37bd7edcada552bc472dad9a0ad8956e10c08c1515a86a204c88c6b62ada9068b184ac7012ddf84cdf07b8def613e516f2e482ceb1825eadb6ef34a580cc7f6de3271ffafdeb816a478ab07f43ac61daea713118812a1515c95886463985a04fad6ef11ef49c2c5ed1fb7d47504bf945381e
b8e1623d9ee1fa0715ee78b1a74ae15cce667990ac6ad1fc29932237037c1830a1864274082f9cc8b6746153a269d8d2d43d55081afb51e6c498795243a9a696cb1cb7d3c455e20fb11a1a34de07e010f2cf4dce097ea556ad5ecc9b124069b4d93d3fe2b4d9570414cda8f7bce94908c72968cedea11555a5e5c366ea4153c91f4e978b76f92e43bbf3de3372f1b2bca04e0d66bd26463226a709a184d51c33f6a74d75b1b8d1ebb0f256ea1ddee10db4d36c471a3013ccf441cf80dd93a7ca909e07a7d35df847290b7c16c31287ecc6e5c32a58679c0820f91172505f5ca554689807ce630e9d8e275d520fa2dc49060482c3301a16dc86b017e2d4361114c5a3cd862899d149f31a8231cdc0dc1bd313bf0695dc69bffa5b912cfcb89aa699846695ed8e83004a699ee7f705882209e143042fbd3cba51b8335b3b4bb85e3ff4807d77dba2052d8ad904b5c7143919e3d4e9d86a6852012f8d2b5aed39b21ab26649c131bf0431d9b7124bdec76e71012d22a9011d30da085114d1649e0da5b83c6b2269784ed337a72637d301132ae45109c001b4ed1fba774dea1c3e094b9e7feb79781c7111c56caf3fc723afa88977631f1654d4553d2cb8fc880a8893906b26bdadfbffde3013c4390504f7b7e541c21e2da39b7bbe2068636e4cbfc904f8dcf8fe03013c39171f6d881dda81020eebc41591bb3799fff3c11f63685f1cadede37da3dd92b18aa84b85babd84d880c5d307a18a27e92d6747c0a45ee984961eb34bcb9e647a271099e0e0b9a1976fe8ff3519347f8e15ffc2e277b93a8a0b5919f916c3008a3dd4aae8baaa7c3491addc48d6204c7ed6d249eee5cc1a7c2073cbd2717e4179e034753b4a0818944213b4922de9985411ba8e630e69f5b376365246cad4a53c5bf37eaa36724bfac15667734f3de64d89ebcfdd69cc30e1b7165ea21065073aef5654ca2d31d9996c896a1fcec9cfde607ef0a87bfcede76c2b3c771a69a2fd34e9a14691d2b2fb5f49c8d303a1e5a196fd058ccf24464f71b7bc4c715e8e59f29259152bec0dd76ab6023e3129a945da9bf489bb1481cf77bfa4950041ae8d831cd2a32d183ccb64c71df74a971e6231673cf3504b5c8c7f5422cc4ecb95a400f179cfad6b43adafee865054eb9775ed7870f6853421399e3d4b0dd2a85bdde124a29f2b7dbebd4e3c7330d69d73176d110abcfc7f4e49bab946ad91c93aceafa4922509b8e0da82b64f34926cd9d2a6ddec0fe1e48c7f2930a238937852363c09c01785184a39b8a6f48f001c8cd897973f37087667cc7838310a787dc8c0513fb2e41683af04cb9080c7f51295946549448d7c47e340e32dba2923ffa386f1ab5db817edfe1a0a152d5aa7f571f412064d4054368aee6781b34121d7d88cc8ced106a0fa09985832b97139901906a6dcaf971f778a8dc2e42e1fb042ee15f519631739649ef93297d5f96a567eda4580a4dc2bc1a8a8f2217519f728c831266b6d40eb08309d4f9e82c667c98ab50c6360c58f776f3c8aa7ec89529025af955b008ed007362f256353dcc909415bd0a24406078b9a6baa31ba160d684efdc0b897c47a8158ce0f8ec0e838b9b36836f7653784f23d76f936011bd3f7e701e340bdcdc8837bfb672966bc6c12066836feeb37d1c0a35692d19f0b5200a7caac427ffae284aaf8795481072ac4fb9f929b2d98b03100d0779663f246368c75c9db848e9595dc265397c0e712bc53b4407f0e6c210ca074515d5bf82f4c31cde791b084b2a0304c44cff4037f0724627798ab3bd3d0dbc0aeb7d4c9d641cb085cfba6dcc9f810f7162c03fce97f214e94e6c2df14264b0a256f0ef0620d53b3c4e7c636aedebd07f1baf478badcca5f2f2cf73d653157a2550bebf325f6237c4865798bb29d4ed9f28ce61db2fbd6004b89e4bba252d6aeeec9f8531e7797c577a245721e4457fe33c82904ae2bcb7f65fcabe70fd60dd723315a0fb3cd5db45ab0c712a2c8a0ec647f8656076b6dd57954364201eabc1a39b2363964c8f0e78dd89d16b58f7df193060175a8c61814dbb7ff7c7a30ce65b1c00213d16593ab4425c13650900c8386c7cbf55e20093253b6ea667222140556f6d0535f65f28fbd009794846c46591a45ed5c14bb25c96427c8b8fa196ca98476d96c8047c775a22a7cc2bf82d5a7502f7fcadc6bd53051a0892324ac38f325d05627fc8c0612faf19edacc1241562052cab6e5d40166b912281a6c38cc2588a5fc154f9369922d003398376ecaafd40e7c24843efcca58f8fd99023d2a11be21ac9660a111cb1aaea11e20404c0945c4c263a99d52dd451c7e7134c7ec4920a8e1202d765cf1ead23c96f040745c76a6f7c1d93d836917bb76568a12042a935009d8e72c1a02307ae2064a3719e27d8c8f0043a623c08ad1dea5a24ffb27234e1d0edef6c3d8cd737c79fdd2ffddf372a83acc85cecaa3a95d45298e0605b988e29fed4621ec721083b3e9ed1e7
c2fbdb23c6666cd298ea938685428c2d92aba50405cbb85d20be422ab3ca512e6fd067e04317be65a11cfc22cf4bf524c3eb321c50168e19f0cd62f1fb900d28c8be5b869e350414691095dbfe76d8792b47b4af90316a1e9f76996dbba9d8510f47da87b1d73f5e12ad93b0669132e7867a9d853664f4758c5ffd4c6911334413ec21e951681f0a6b2dc8db5456b4a717954b52470fd5017c50b522c4ff1cede0414219cfcf7d58c1de210a2c847e2ca6861991db7fcb9cbcf914728f4d8ecf7b77a6dbcd4bd2c658c1536845920e608ab76e0ae77da6b9b63cf44fdeaf94d2ff04c53cb3aa97628d4795b8b500aaa220e4d815c072a39287a6abc000dca7a6b8540dee90b5a1b9b40a953849d"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f000010f480)={0x0, ""/256, 0x0}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r1, 0xd000943e, &(0x7f000010f680)={0x0, 0x0, "55f73ca241ce0629825b3918ced6dfff47299de22b2dc6a2d8a1d85eb428e00611c02191f527460a87cee928c2e5e9970ba69bfa893eba7fa8e673b4bf3c8098f94e23f973ca07591194e91a221c93ea9850b659c886c26ac3440b9af971630929ed26bf651b165783002a712f658a028c517a471092f7b7d8e4861d5bb3c612c3cc2a4a651ee9aa186410cd5de58946142d3456c5a52a2ccd6ed9894bcab7b1b8e14178c4d6426dd7d26d8042979a60fb11104bccc46720c9060e5b998a9350e4f0c1461212f50ebb8e18c515255d4c1523af9dbcb78885e4bf6639bd2817d82ce0d2623cf4f93ea4fc3b0eefc94d9ed405845e6edae8fd354f3ae6d4378fc9", "30a1c405fb61f6a996c607f70d45d9aa26905f4a35e634989f7e9bff6ea87f91f20bee06f15d29e1341a3afbeceb336b5e36070e431005d21ba4a93ffb67ced38fbadc8bf933caac741ca1db8c6f296ddcfa4656923ecafecd6a636325f3f50f9dc74f1976e60e3d0eb19a5a5b72c0c8e15cea2c0bd52c7b48e52446b4dcc857e45a3f0b5e60b038ec6ca20c8c119893bb5d995f276c37f9264f4d8c3b23a1e379c21017a480829349879ce17cbd0a0e906a93b54682f96e69781723b8430bb3d892af8fcf816223c657535662e22b19b51efe09ad88294bed83428bea614ca709bea3c034fbc46dde1329c37aa870debc5b8a23ea8bf7c876b765f222172f91d46819fdf64bb7ac42100e600a3490de25fb1c2f8a3c3fdfb4f1e2a338b6995e48cc98b7799bdc4b9df6cdf6019b686c89c42644e61e8d21e8a9eb9b84f74bcf466940cab5442a553be0cad4c0cd6ee1918ffcb59f03dc4b927fb7d63183abcbaeaa006fd8ee77e2cca44a63aa4e91d83bcb8df2f117be6df23791a16b1b9451bd08a2f5b3d9be543e2dcfbc525b77c3e02215bfb445d464f828ec3b06da3591068664f371bc56511c43e238248cdafbabff847bdae88b1d8cbbeaa50b69e3ca74ab80eb3847ad6a9dcf021aaf09be52c46b896667b407807f7a1e66192f0c757fd408a9221199f7a8d30c88a01796a26dafe9cf8da2d54eb9a8cada7b5ce9e8a2939df8fbbcf8780a0d52c6ed34287a5e99d039c6fd5a1246cda45f44fbf5d7f037f02aeb195405f5da4f2eb95b66361a00551d9c38021509c50da522e26ecaa97dbe64e8e3599b10b1f789708a7baa3e21513e00057e1129737094a99ee59e835595f81695ac73129a76c41c0a2ed25b0ef436550d86176168a3b90b2adf83e429d27e4f1af9f3882d327e8c9e86f387c417e605b9abdaf8e86dca24212f077dbcad0ef27ef14981990379ab2f48118b4cdcf7323bba24bc7fa6082b1a9b77c2fa19fa61e92699cf27164518cb31c5265fd88cc065c3f550c216b6f67861a2ad0377e8c718ecf3ec2286aa78b2c2c8d319333016590bbf46f65f48ac42fa0f647dc68d4f10351fd5558a4797866a79533648e47af42a62f022300b09f8e8010d0d289fbb2921206a992307417014abf6ef419db31464418837653990050370f5bdc0891961a6e0283289884d92c02a2533a1a8efe0accf5376a41aa19f1c4f13571f488ab7b1c12138d37bc83a7c0b9aa085203456756065f43dae7bb668779ef3c56ed23259240d2f5ea554739769470a6db89cdc3ce19d73c8b775d86ecd5cc88fcc2a300ee7bbc3bd2d979203c09779724d11cfca84c8387034fc57faf6bc8cbcb5e5aa1e87c806e35156bdf2d74350ed0b12205618d161c88161ad1cefcdc9548c657fba01231f7e69126e3b42c5c2c22b2d9c7de41cd95d8d85ad43c94e370ca5ec6daa2702292e83d4863da60ee9f8a176db2dc1dac0fb95b07a50df25d321812c285fc832f23ce015c84bac6c97748f53c2a9a09a89003d364de5947fff9863af4c0c0d1594c0065026a8701fde8d4e2beeaf5f88f30c05e4367b63bec32b9c64e858b6dda63c914ec91a7ae8e391e645e4caf78ec6971876bd116c438acb703989
5b6a3bb423f609e2ec8377f41062a6d44762e91bd48187c23e40e7056e569cf6784d9395fd3942f082adefae068fd1a941385a2c5766cc7d3186162d7188e3be43dc1e3efd8d3f2b295505b0c311c26cc180ef6cc0ee7b3641d70015ce3c250b14de4503df5420e8a0a4b3e5af59451059b3ea90e9a38a4176b8fe73d1b5aa1fe5fe7e1f5208bccdbece162a2f3ee1a79167a7a9325a9d70fe00a128e35a8a9c75fce8f0061460e23f773efd5fe1e6ef1daf8a48a93358766bf9b0e47f3ca840b696c2213378eff915b9adb32faebdba0c530474dd6c484b350cede56cdc5e610d85b5123aa38b4fb0ce9907a7a6908f4a48d90db2468a37cb288711824c0f66053591b2cdd8898b68085c85287e859ecbe3a2a1edb3fc0c52e241387d29c34cb4709473a3211448ed637488ae5e3648c6ccc337af9ed95e4095050ef69047836a63702ea71fa04889cb269b4da6de581fa8d15bec7943a7a2c848cb1e95431c90cfaf4e11156213a089e16ece6b1962edb1520ebb542d8fb6fd6c42b1d98a53f81476a7be81358d020d3270c0e38474a3ad37ef544f5fb6c47b5ad2df14e8002e596d8afa8818904741518e33f67be0b5465eb17267c27dd20d409289c560a0f141ce09b2bba194b830bb730fadd82525d26f20aeb92533c263f0b9581c1b68e10f7d33f7b2f386f144f5e687e2b88c23e745b1a7eb1edc2ed5a2ba88d4cef0577c3a16eec70ed6e337bffca0d60fb8079fbe91f003256f1fe807f4e9d7cb92ace07862a73edaa47c72c0ce87a91772ac3d64a07887588fabac26a02412468ffea37200c9fe68f3d8297896f43c75127bf92907461eddb2b5cd56f10c872f3ac25a5cf452f55b302b1abc592dc56a8b7e7aea84cf99d7a154878e4fc46a6191b080474f47fd45af3da5ab72d28383cb8b5240ce0d8afe6614183aa4f9a3a1c751c1eb347c947a890cc92b1ee4a281aa811ec52d7976bf5967485720ca59028fbffa4a3d06934f632185f2efeccfd6ba53dbd1b7ac3cfd174db7a38d28bacdbd24a4f2c2296a86e402d98e20f839c4adcf88b8af8e9523f186926cd941117e2d890cd84bc508ce5e546cd3029c6fc3ec9fe2c5e349473e7acf941c3f384917c64ecac1f10ae0fd3b28cd9b36a3a691d8e5b05455cfef8a10b06a582dabe50a24f8614f9f4404d7085e45b3ce46b5a291d7d8b124465435331b603afbe303a37a470dabf6dfd0aa46ba11527e709225fb1d65fb4aa84e02ae126c533a83f0111ffc12ee8da6e3e1c504c90f3daefbb2285d73b8f6ee205b854442b3a2031906ed2efb80e8edadd388323bb3874ad8fbc070221dd7adc03b1d5155944ab328be1179316dd742db8960e248b6258be3bbef4111090592332305608dc8a4c97622cececa829038e2caff257f286370ad2f706a5b93471189214b5f5dab6e35e13af3073c13b338a6256b33d821a0d0bcf23373e7bd4bb38785a06c94ff5dc7fd10b42d6eeb789febdbf7aa8b072ced106e3d99beea6fab2f13bf9f1986f353bf0e72e762e44c08c3757c55a651df0f63722e0c3d12f78a62e105dd01ff3ca215a5f90e12ee9fec552a196bdb5cdbd2f2bcaa19059e9cfa053fc92ec1063b563c24d1db95c8a28983f27f20d23bdae39b63bd5c60b36615fe738d722b693450f48ab1cbd07481f4561433afbdd8a8de782b81e75702c706cb608855882cc00f7a12be1acd176fcd09f067370eafc20f1eb43d8c63b53d701a7cdee771987b8fed756b7088605102d7e260ab0c009b274c42ad200f988f1a3748de176024220711758032569010db8b122e158b7b8f11b68bbb11cd6a8c29f7ff8cb38af8ebf9d24cb22ebd25c3de03eaa7265f84eb2c9eb7648fff2598b59c0329ae4d1830e6bace93e18b8014da321415982e397f2d176cddfba01172e12bb4fc0fc28092f08e91fb3ae489d80059c03572b1049cab660245dbd4f638e62acffaad871a9424eab80b74daf7a97cf7416796caf7a9e1e714098230eb98921393a3e978ffe6c0eec05b1facfff76aec07d2d226fab0eaf28562897cf5bad52a8a3094e9b9bcac81f2738e934ee297b0111a9aa7079d52347530231617bfbdcfb702d38a4832b2213db0507af05c2152e081a7f7ae46774215d3df287f4e715e5d27a25fae03b52e3a70a9a0f8863dedbf7b19cbe86efa02d1e5a3502e3dd017a86483aad52eb8d0c804ec7fa4ad67b358e2d56ab083cfb4c18dc337c3b7f701084b34ef034e614e6c862e128f5e7a7f9c29a8d7b189dd152b39ee75d2919efc7d7373449c6e60680a731a0e5449180696e7d43062504fe3c204e3f72f3c6086fa2dca710ecb99bc1a2fb3c74c9524c2217985b5b408a5932751c95f4aa2d890b7e0801dbe4b69f05afc6c667cf45b0790e84a3b570f4a321ccc9931a48a5a4cf1e5d4d2629dc68feb9375781ce10f0043f60cf0c48bae682c0c782d2f48e7d67a40dba0e35dd9760650c6ecb9a142b04fd7d50e652ccd15df99110
3e1055b95df53ad102cfe3b02f3e1a454a974a586920ce57b078d7ce0af21c8ab68fe519d761503468bf7e90dc1ecb2babffb47c07ee072336ce54d6c0d5567a2bd15c8e4913f536de53f71452031162488c58fd962b457358a2a9aa4fc8c96c6a1a996c35eb389ccf9b89a52965e8f7e86ae1db12a92feeed4ad24eb231c40859a7d4118934972cc1c5b229b95f1c208bb0e3f4a2d208c4e5c3a6a5120aa2ed95f013227ebd0c79ee59ac2742609dd497424d4eca6500eb99cec62d6cd569d7dd85d1f0f049faacacd489edb2d57b1220aa4e41b816496d86e0e03c8d5c7676bb0020cdffaf07e2785e8756774a1e76fd6ab3f293e110ad34ac544ee808981b1844bb5d653384fd5f5251a8d71fe4611fb4cf16268b279a8c116f0c1647c5e990e7f9e30b726f59e37b30e6e543eb4168effaf63b810bd0ae4de2c5f3de502ca797dcbfec2526bbb651f078a1c5dcf58fede67a966383702b45c828619005a9673fa24ec01041306383f58d514a52d8e4ddf97563e777f713bf4bb6c3739aebdbc56c5c515ae3dd41d8c7eedef109be532023199fa68b42250ec10e429592986bfb210489662108f9cbe88d6d74ebcbe38d980ce5094a77928c3b0edad147fb0db122152601994ab28fc546c49e864f1c7f164b5f7d8cf0c1b1d2b60df114fe15f6a2ade484bd5a4a408ca1ef8888eb401d4c52d5980a5322f2b7c9db93d013112af3cf709be2fc81eb3b59feaeceffda5df5345be9d74a7e43f57375a0a30ea9b5698e3483c84db4687a6dc55daf47a7b1e7fe126c316ba29e61c3b7acc6ce35f76d62aa394b1a068bd5432b003d1bb6bbc2e1ad69860d0e73234e6242d61b05d597447fc14496300ef5e7568bd54040a1881b6f9e6801a93b52008f6dd51958d9e7d0e6b42be0b6924027d58eaf36db9031744a688e1a9b96af98b4257b259fafbe7c2d94ed69962c2313116407db24c4f8c72fdee979bba376285626d155e67ce9ddf82da60a132c315474b3d93055ef8c34ce55e0768594de877898a6dff6f090af7ff6aa5ec683eb8c051a2190aef9fb558ffd49a31df0e59eac92662edb534fcb0fd211f8accf19fa2a796877556e79be2471878cbb4818983f5876ac85ac5ef57e4696842ef821a858f0b2d3a8f80711947de14f3ea494f7ba3bd45a57d87fd7ad2a4ea9ca39f4d90dc623c428f9f9f143bae68fc65acabd4bc31b5550620928153266d55f501c0a6e56a3e5dfc4fbcdcf976bc0a"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r1, 0xd000943d, &(0x7f0000110680)={0x3, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x7f, "80d816b282f178"}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000111680)={0x0, 0x0, "78abaa106891ec98952476a1fe82df96b4f00c5030dd94ed1989eecdef254e65c37e5c2947264128d6ad6055a45826e717a21635d1fbd1102f46342648561a50598719a3634e8089f84c040a87c72c3286db56647c382dabc81ada9cba737ee62b56591e7cf2816625c3a08a47509806be4db58948084cfda6524703b9ff5330940166b2f7740d659256b02ced9d5b4a7130ae408a8dd7a7d40f63aac6d4965110b46fa873889d8d34e73285b82dcf380077bd3de7937ab377abf3b3b1aa226d50d2bee1f4b8dd42211e7438821ab3fb2131da1a504940bc0c6e95b1c50fae05c47041348f8ebb98688a911fd816a1180e589d906554da661e04b0d9c5da41f2", 
"d56ba570348f80637c4d0ac014db8a121c2520dc51c8e5306deb9fbbfe2348ca1866c123dcf93586227f0020b0ccf265199293a6cba1299e526d0eb5484711b22922e2cef23efc67f6b9ce60041eeafd89e7a51a108352a57b92e12f52fc8915989b6e1ec9c02764e308aea0421e52c74d0c2862d5fd408f9b2711e285b0263ba9a7c07809aba6c539c0c7a886f0a076cb15094249be52d44e4a34bba9c9952e99bd6ac353415f4563b43443f602e542a607472140e22047c15ca34692c2311221df1a012004acf42df43c626abd6654faff6769cba61cb8ba8cd1b093dc51239317922663263f7b9145040ad87807db2febf6a2d9fd64eaca3dddda46e43e4681ebb0e1952e316cfeb5d3e92b126fb4ff19e7c7e9973601dbf867c53fa95af4842d25bcbfee33621f113486910bcfd9449268c854b82c61fd3e3af909de0cfca4e795cdefa075f3f4684e5d265b3499ec19254e9a99ebada789a9c7d9afc9fb651bd33eaf35f9f4a3fd7277e44308da4676305c3853aff45309561363c17a7b0f734d5039825e9534f34df9fa27c65b0ca578d1a74fe0cd0d8fda18020e0943da1a96923a96c35c5b157666428420f9aad2eb158c6e1cfb00d7321a6f4d5c04be9889ad460382ab8e12f4aa04e078ba5b2a2b17a8724cb0ae2d476df66b23184b6f0dccfc225648a558e81b22c48b91ad20ef3afa728aea4266dc551a8de8af4eb78884d7c12feaa3f70d16dabda17c2e146564584021e41da9d8403afed151a8b7f72ae8080b6e83f5ab63e670837545cd6ee1c92e83e5282fc0394a024c0468d24d0130de4a4b3914df5308617b9c61f8c1fbecf4504eb9ceaa4050f75d0a04928a5add1bfd67fab24abea263c1e52086f72506956567df43b8719b2d0d214108990c556dd9d274532907e05626940da21a19f16321b00343a996400bd9f07eef736b0fa4b6d686dcf0de36ec0b932a7f34a7168d3b3eee3c45c962cc9ab99519f0b4d6b3d2ff03af787438c115a911e6fa5fab86931ed2a0f4795a2b1b21611ba234c24f37857076d6ce5d97e2ee2e54b1578b4ef6a493d91491e5b8f2254e18e3437b59b8ed9e76aad258ffcf8eb795d8c966f808f146473e9fd80bd0c35b4816866805354fd95a0b42fda052cc511021c2d005c0c6fb55a63f403894168cd7f3d10808a9152b2c3998b251dd7cef5f7527fe95c9148917ee263747a8fd8712da56291d614e048cb8d653c0ab5c23ec5973f1131cd86062788a67fd5846134542b7133af7857813c94a7d6b0751b3a9d0eec481ced710ae4d33e5ec52ca7ba26f09b96656161951a0d0f275a5c609a43d0e04fb525e6030a4c98effe771aefebee4dc1081df3c61b4290c6ba6e12fc575280885a826085ac2dc36b33d241d5f3734e2091cc8111f99efa1218627c7daec4a84a21c45154b8ed8dc66dbbd09c4e7eced4faa51ba0940561605b19df73a2e6fb19755b8c5d1911a20b5048e2641431ecce9a397033bf1428050c62bed875fbff9b5b5e37c21c802ae20b89d3b5fdc1de4274d661ee371c903cbc942377b12ee4392f3ec7c078253269b012c3378e34ce82e7f6c0c962dc8e4d8c3d712ed7633974b027ff8dcccf2d4241253141a16427bfd82a0ce0bc65ffbea729d1381270be597201fb4384d144dc96f6f2401aa00cc0fcd08ebf00155956189b4c33d9d5bfe0ab510e4c392f1c74f7faa5894fe1b2a02634c11abee462d50e508f8a524c47942776022f4d24f571a8de8e9c9d1cb96e2571c7a3261a3325c474ac63778289704ba1f0bbbb6dafa7a1205163b45fcee3b79a322052ac5c5e7f9063d3d6cfe79288d998d00d2a323b0bf46b6e8ef0423849759673d46c9db355e157521a0eea560077538f8b54c6e9c7228478fbc220f313b255952ef259a910fe6f2edb6686cdbcbe03f32755681f6f0185b1bbe8e47fb80916227dadb53f529b9c20c414a45f11fc5b44a3606d4b59c1f1f2973df38cc9bdbd9041fca9327f0636336dde7473cc37ef6754b06978f01fe7507532f148fad4b4ceed1ff662a465ecf9928a9969624571fd916399d3d5e10c79e94c7785f59b99c9c99e6b172aca396b108f02f47dac3af893555448596f84d3a5d37ff71c7f074ec3f4fba5e6c4191b9b7964d65ba68cbc76646908427c3ed3541328fee1ad371ac8ba850a0411ca954494af7409ad9d3c6ceb8c4ad59a705fb21490efa07cafb4129df0ec991ee0c2c11f78accaaa0b32a197c7c18597c74ac5682f8c2281995d2e7566ef4c33a0333d941c52b765071e82bcb8c754e18abdadaa3b7162c86ea7f898b5c3393e852898b70259db44563f341fa90588450fa052b0e34e8892300a58db7569be089a67fe96d95edaf17fca106cdeb4cc52cc24053c20e94a2e261673e4fd260d2387a87f597c2077b2953ce42f79a685779d6b3e611d8150231ee4da2fb12601471fe3dd12b137ade5196698f6e0ac3c6c1bdadf384edb1e7a9ce75e83040b623aa7a
754643cf1f917c29c1008c192ace91423cba84a61a6da558cd1dfba81a39eb008a913f3c44fefe97049d52f8d04a99b0a86f744c5a8dab84c10dfc08aab5514d29c09b8b865b2abecd9181fa9bb17ab8def7a7d9e1e824d214ce4cd8c4631fb3edd5ed86b2e133d44bade2d080fada754830c87c3ece99559badf1b3857b8c5618d853cc45b0ba87bd23ac3c1b7a4ad0418eaef4bdf61c83a9bd1d2fee6df9729bf4d16e3b5ec11cb6f2fb53c9c3281963a2f89ed013096aecc7a15efa33b81a79a8bf9dda75e215d6bebf45c2782d5339153d924299de502ea996edc421198618fd2b03f8dbb67b245376b0ae7f349a721d09f8ae56b855b5332396fb272af79a943ac0b12fdd1e1a7c7fc042b598b1cea55ad27c6bdd6afac067b75f899d2b6ae6501aca728d7f6f06ba38062b00899452099f95fe9b9b31ea4fc5a573e04bb8989ebb271d3d35ba1751819ea2038cab61bf0c5376cf17fe1d4cdf66dd43347074440d9349b3ec385b5ed100875054f51f64dca18653df60f4fd82129ca32fbf9227091617a1602454719e0d2d81877761e74ed40fabfcb19c3176cd0a8677e138187a9e5da9469d253a36886be2a6c81eec939755f394040c60f01603c65bf3a46724152085f196b75316ad13de92f41c60d6c96160629e611af2d9abcbb6dea93bda4788fcd2922839ee72141c1fb12631148c1bdc5c09dbaa11913826b483dfb2f7a8c6535a5c821e6f5c28ea6a4b5da2cf629391455c694ee0927a2f20a76567624ea6bfabacf25bcf0ed96050117cbe661dfb60da226b181b4d78293e5ac066847ee695042ba0f5138b66308e3e3c8650a3d0ecb31dea5708a4491d79d2ee30de9dcf9f91188a3676adf71f80eef41a5f7a39a280093404780aeb88dbe6345ba9da9f1d577db2edcdcdab4d4b9e33df8297c29c636e205c1b545e7baa4712ec6daafece798d3296cd2848c3646e8912fa63c230be15e9fefe99a81ecf7cb6a59e75cb293f41e6dde1ebf1790f8bbc20a5f3ae75209e876bd9923969f76b48e9a58784e81d854a9d366c247b0c448d9df9d1630ba690af58285b6c86b81383946d724e38924e052f73d5b95289567c7d820bb1313f21fde119a2144263130afa7d4545a968031cae1159bfce6935e5f8a9fc2df7ed5542e01ff1275f784e3d7fb642d477852e981bc79b82129b5c64b915db363c4a384e36f13ab65dcb2279fc378081ef062f9daa99bd3dfaeb1e2dc0ad4a7a3a9a527f4195ff4a238a663f0ee5d208b91287c46d0aa40b741d2defd4f1a8753b4e42cc789240aef605e43def06a0d5f7567ac0a2fbbe58ea260b061e40e47a16cc96da11fdaa6b7d33d9b396a64416af691a74bbfe86eafc14efe1924624c9ab89343f69ef7d8e2038a2b3c537bd940f9a741026780713c1e0585397bdb5338099bab6c16b41f39b5d455d083c567aaaa9cccbe5e1933e124fd6ff172cc265e23c9eaff646779c15452dbda093a390a3c7f775138e2c81a7378073e1c4dea6a747ca1e0e88bc2475ad1371c03f31f8008a168e3052340d46119e80510bd21e308161d8aaac00c136967b0e5b61dc2e9198cd56be93192520a828c5495fcb12c64c6f7d7e3d14ed5da25085900f540926e78dad4e4f6f5f9b340625a52be032e3db7da0e0ba83cd49cedd947452d83d7a67ef37df04f8713c35426a8e92d68d881540d2c84644dcd1bd153ce5bc0f840b2e56cc50bef5caec6b27f41311e0e697b89d1eedc5e90ec5e3484b8a6de64221283c210c369acbdabf6c81365df959f87dbad12c7bd783bd08f345c041bc83ac0ecb5184e9b404570ae758576bff8739bf93caca7f89b20e8e057b9e8cf7eca524237c90e137f0aba0024e63934118b1424f634ea6c89678137f69196948c6c42c9268547bac3830ba3bb06a91a33d9f90396eb78cbebbc65a9433edc45a0bb17f86904b7f65524b8c13f99167130d6db295af7060a75c8834b323428fa4747ac9681e8ed7fce3569388482e8c3c4cf67b296dcd3f77ced7e97285dce8ff57b7117499677b3d9669a53397a950881488b2b525291aba563d5eaf3fa20f13c694b8b3eb6c85843fc7efead6ea2f364a1e5dad98625009185453aa36d9e51999f91f7d0a2991d05e2bf3d31c3c33e3219163f5b4b34de491d0bffd5b87d632f0b0723d5ccdebd3c2a9c90f1285a5bbd24f43e8dc4f2f55313731bd7dfe9b82aef153334ec9506986c6fd6f98c1c356a276d9638d6b0e7058284d54c885202850b3255275dc56ba8977d0661627ce9cc95f7ee353d3956e4896761743edc10c4de66b5c1f4f4bcbd10a9c5aac5fb0293d47502f7dd6c21b7a98794ab38c34f7b7d56528a93e9de24f9309a795e720364bbc44e777a74bfb72916905488860853dd60c095ac0024dbee0766ac491af0630d919e91e46673d732de4b48ff9857e1aac550974b9433fdbfe6624f99f6e21da12043531288173e1e218757ae53e395c31ac0bfbe7d1b0a44d5b785
062ba17fc7577bf5dc5ac5e1a2c0987476f14715d88809bd5b9acbf21c6132a8cee5e64bb06f14505a311c7a57fbf06f5ce942ac600a610399b44f9419d5f25d08ede8119678f722013b35f2e690f48fa05a80677cab2fb70c306d94b1895ce8b257e7a20ce6df77ff0b3d978926a152758e35149d72bd0673c889ac6527dce6eaeec49460d7a36269ed8fdd9601018f838569ccf616bccbf5f0db3c71b9ac7ad46ce4da8f924c8775006335a621373c1fe30e7fa784fac3cf254832c2dcc1a6fda2b9e24cd107bc0e77fbd131875a95888ecbab8b05fa9359eacba880040dd45f60c93eccc420479d5609fc80fb289668843e39ba37a22c4082565ca756a61a307228b2fd85b19d93b5fa0888e"}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000112680)={0x0, 0x0, "d4b416dfc8fb097ecbf25ceac66c277c267016076baa4442bd924c96a3152608a86f25b8dd5a3a15147111ff40132ed5c2669ed1e16c876bb7de2cfb6b2740703f41e0f35083200e8d41010171ccacc5d48707102886524ce3df085428298d82bebf417e08d85c6921fb5b2508310317d9855cf7d14eab0eeb7475dc3784b23c01ccdde8ab4112ccf93d375b523dc2d99d0d2855641ff872b62fef20a7ee894338f031388268d612d9081aa3028ce9fd9b46267cfb7da22adea42f003032302f8f9de30dee79091f5e7322c8e17ae08687a53f8829b8204343162f19f36e4c511d10648fc9adb93657794f0d51bc7ce37c3c6440ff67cda32544c57244cfa1ac", "216651065ab40cb61f7f5e3c70b70a04e3098cbf9b5a8888906e37fea1e580e2d94589aad67f7a81dc4b6bddeecf7046278741dbbbb93c5b6ab86a6494e22f7392597720d6827c8883c4d130682e7887bae78e5e91ed96239cb7a3303ed36fccc2bdfa9026a3ff55977c6a812e046d10eadf5581731b7bb9e4e608542c2900e58692720f4f021f48df8e8e57b7d3aa0b4dc275560e2c8cf5fc198b4966c21b05eae46119496568a7eb3c720aab2b1e5dd49e4439c50f4019085b8838c8574978ff7880af30f870d7d49918a1744cd151c66ccd674e219b6ddfd99baafa854540dfe292c16496a930e338279e58fd71d0f5fb5123f1f62ede271e4b38a7a76e602343f56b553f63dca211b525bafbd1efb75575ad3c9ba32faad10e5b45181872bda575971979653bf87939468abfb36b26f3b4c48cf368c4bb70cb3f06914a0f54f029c327eadf264a873739ebb72e449d0551f1cfad6689c2c7fbfe051c445850b5f00c2a844a81e909df2057aee976376c013c4db8ef96f9ada372299e2926ee45196b95e52c7bfaf714de1792c7d2c7f31adec62d2f1bb407cffe97f8b49e5cd0e383e44f2d8d66c9c6511dedc92b2d7137adfd296dbdcf15c9f4c96a6c9c65acfaae5bc06b66223a5ea0124db341494d248f9ba46e4c0f584c54a24129b9fd077f9a608b58f7bf1d9c744808ad9aa5ffdb04779385cad7d1c26bd7492eaa535dd263fdfa10d2e0b40848d3edcdea5ca0e61f49f96e630cd441e4b558dedede305f940cb0be6c1124b295e2eb58a09c76c6b9da2fa87f2e67c68a649b234234d4adea1c0542751577ecaebc43f58b0fbcbdcde96cb0516a1928e35c673f4faaee0baca5dc63850920036cb610212861dc748b66b20cdaef21088105d890c2bbc017d8bcc73acce2648a914299e551ad2f21d524d1409633645ca8c147055a2b3f13f16142b5b2df8538382d0f806e85ea2e75707a73ca440af904248db98be927015d50fd7f05393996f7f8377a577bbb2d6ec9d67a4f4c2a8e8ac912333ad3bc5c7391baec4ddb9db003284d4d767c735d8dc6ed8054104a660236640d987f06192569e03a70064adc6d96f5a60249979697723d13192d69ecd09f1c66cc63dc501618bad143fd2575b63086aaa44bab97adb6a81d9e45accfac1a272c38832e372a4f845f8cb856051f88d56b193d85f89ee04be36a9460b093552aa319ce0427120c4c4852631f3ee3de2e57f6a51f9c4848480c0d57177b4985ce83dfced7738d05cbed055328679240b943b7b66f85a3cb258cf53cada490238ca83941d7f349bf03f88a1e6a9e475fef8ba35f7d30e39ec7cc787adb843d557168d02476f758f9bbfc5cd4232a50b30d03f1f8ea644e15409378b2ddacbf96225d699d9526c354f65274466009a61c91e4fb2dee9a23a1c1d6a054ab9c48fbddebaa465ff741ddf47ae71dd79b84efff8f3a09b6b4164fd79d0a86d03ea5d703910caf4446f258dda336538b09fe1e535db01a876bebba8607e19c497a18f8593fa8acca46f8390e9fdcbbe22118b5c7c2d1375f33aebed54e375e7ac1dd68b780be7710e9f598f11dc41e9ca983812315ffb9b424710b7bb05526a503f33bbd0d352604b4a9f906b51d3209ac5e34dd6f1f5594914dbdae4b1b6c1ab3cb2d64fb788c635aa1a90e9aa26d295b61d41917b7f6ffa4dcb5
705562a4464efbaf9e3f35926b29d6bf014dd5997e473a4d769ac9ac99025d0b0695eb7edca42f2844d0bfb210c32116500c75dc3c6eee597b730a98be6a4a95948c85d9a309caed22eb0f3a41dffb665673b6268ce3c9577b4c719475d467bc50bfbff117dbc011a98b78620b130514cedcd7441cc837dc525b0dc7ddb310c8d32cef60e115d311528e316d77fafda5a7d4e37f4d4da36645cfbb1b09570de5ba63b02722e4577cc26041d851849c8ff84655813ae858d20fbaca560ea85b33ac0a4a2e0a6c48a0e8ef78dc09cb3d2cddbfae7b9c5b347ae56258e1bdb26b5116a4fbca01b6c6856015f34e8cbfe0d21d3a037272393ba1997d39fe84023bec8d792e021dbdb18c7732e6bc1e11b129fec2b882e22a2d290695e4552eb4711fcb0fa4ec15c7cc09d15568a11f270729f80be914e56fe5e86cfb872f37b48e46a345492281cd917b02b5b5e28c072a4e8d1fabf078ea69b978c54b8c6a247ec332cfee91abca383bbf4b71e88bcece22b8ad93e31b678fcd159b8463c6592945a7468eb4f0dc5e5a39bcf8a2eea107061b4873506a883164f54dbb20089b9bbe7c47298e1b95488052ef161b124b350260a18188c3a6d4570bfd6af78d67d3732d02cc25c0dfc05c631b690e9c000038d7e25dd537098b9cfe980a0047f485c433be5bf8f5b4538db98d0e2227b6eb79f1f35fddeb1a6b8e48324d52813b7db52054987e334fa1f32258f449e77a9b2c095297cbaca13fd4a277eb9be08f1d2dd954e95e86d665ea373bbbe90ad7ad77d4e842ad9973fca979f0aafc326de4b575147803c8b19d49e4e588eb89d95d9de045b3737ee2a1aa92baabb48668bd9aa4efc3f13e749a08f9ae57b6b640e37f1008686cd8e618aed589f80aeae9d9d4b8a42b70884b90d39ed44d9426e87079521d23cb23de35d9ce4455b70d39369b93a0221659347554500587fd000401f232326eb671e4edc4cdc3848bef7a0886c12092986383b87ee3845f750b927ba24f0546e8edf7bc12b770e347c1adc9d465c6433feb0b1add2a300681fb449a2c98163da945991b995d75a9187a359ca5dda21b81c142962eee890a9ed71dca76fb76d5d2668b21b0adecae02f143ad47365480a238f44227c41a53f5661ebbfbec88cf85b9ba504b4c8f1c799b7529c31ccfb244645ff95dbe6c3f37bc133f01f07886eb3c045236c61de30cedc4c6ea160f47d0302f6f19dd7976c9c63891bb5d5d71a61c6b9da09ddd03866b2760af8fb5f57d33a5efe9033d2a1731f112d1e0a1b810df6d33fdcf25048548b52e13bf9c40a498b559388e8326e1bf3888c3bb3e919a01c2eeee199c92f2217ec734c220c31981aa1fad103eac105abd9dc0335404ae00f351ce1967e826cf34d823e8deb937c44c52f5cfb013d1e8c9b3e41cd917adc71d70102ae6500e1c96031674c2a5583e8e1a6d887390bc64b037fc475e2adf28969aa22b59819706ab91f779c79c96b226e785cc16fb4b03f27c842a9429b8acc39140c0ce1b9aeaece2efb94881c53a0d480ad95bb2442f87ee1ab77bac9e5ecfee418f0bbfc0723ac9605688f7ba3adef7748fbb67cb8688f1cf3caddaf4726f041190c8155759e4656b1046bbf77d8156d5dc58c70d5e189f3497d1628ef1304e69daae3a5d8515a787e659a581c27db718e257118d5fa6520f58720fbe873a60ecf23a38ad497cd22cb37f30cf0e9bcbe79c7153483305fed73c2a8d9cff9958f436084896f9339255655ec58cf991c6f0a367fc0e0e3fae2b400ca9dd15fa25203a2467200b1e6546853fef70e23d71c016b1e350121e893648e279173d2b53e7310cff80366c36f81dd9f68e63e1147e278bce8c2a824de1fdc0b0898e6e399ff6af5dbdf8ccc7e14b6dd314c3c6a0bc70c64f16df84187b06f6f1c65f55a6a58082732d79cb2cccbdd2fd6ee1f4fedccb9aaa96de3db1c4d62c5b5d465c8fa4613ebe590e34d02823bd5013e8b497c82be3be7b7615df51f4e0cb75b2cecca364f9d1638287dcbb044a15c7eeea8d0dd519e5f670c41797006c44e18514b37e36c663fe522591681fa0433feef6533d666903fee5db38308925254c3bac3c68c5a0f7be7093b48d8d32a8c041ef835c32612079880c0573f71d6fa4fa2d8e9515a7ed44c43d536ec780b141926f33b3a46105265993e1208416d9f133d02b8a5da8a305cfd9524b570cfcfac15dd86326dd1c5a3ed42d694d2188f681b4af2ee50fb29dc103a536bfb6f775d6e064084f09814586acb0e4bb621b9a3233d7cba2c1d46cade39b8d3eab4f2f5579c22f19103ed8ddb0c8732ae7b126057b9729354c32b0416f352a8c72c20d842de25bba5c89c14b8bb8fabc4d75fcd67af45796477ea8675804999975a60582fad9a5fe163e00ea71a29437b1e87397dc2c1d1994da583139256d20f7277fc07c8943399d92ea8743f4ce16fa94b6aff207e3067bc15de34a474876441da3cd918119c18dae56d710c74c5ff
0c099f08f585e0a6cbea58f2f94eccc9c9e3121b24570ec26344ceb164cabe1d63f397bf25b25eedc0e4fa32bbf959fb5e6e88dffb8c8fcb0f1d70bcdb18f76e4a8fc9419520f6c6e73168dd12a92c812ae818747a45296e21ec1dd702ee54269940d6ffb019e2cb72a8c64ebf33623abd3fdd8a3f4ed7c8612e68a3619e470b7c28d5ba860f33c1991754413a9e54d207614071af9beb504d95d328f5132eb3dfc507cbdcdd80b1819828d126d04688f6238c37736651ee65d5e2d80d30a8f8a3c055a29a059287244b7cc173d5f66351fc96895b9f7b7c32bb895e2fefb997b556ddc4f20ea81f8f6a01e586b188b660d7d6a47bb33c7c4f63772c29b3b2941bacf719a35e61392f031ddb4cccc9674b7af847e9d2e78ef632e6aa7c79459117e2446d5307b37502587717c7f6c4c25162d2fcf30c18ff1dbf32adb5b49bac5012295c9677e69f36e616ac05b3ec11fa5eaec177584708ce8fb22934421b038b6a9af16ddbaf98426e2fe30dff566f4ab267ca2afa5ff32de831f460b784cfd9c07ce032d5195208ce1c0dd8271f16db184ea340c86d05873cf49727df607d517f7c9b648dfcd67a29a60d7c511e9e2b2d231e0c61de28c5e9f2e3ef62866832a946c9b42d926f2530a41c90e11b6dfba917e1d59b96a674a2168948b6847185b2545ffbfef43ea736d28ca0c8b6e5c89d99bbeaadb460aba422cd75228660063e3f75f9e517b456d5e90765c35b0cf473dca4a4033fd5193dba38048047d7fe84e8bb09bb6c3524894679014e78c80903ea0cf186a19c93aadcb664a9566e0dbc62037b2363c514fbe3d7124d28ed769cfb417e3280b17494672320cba2e82b9957a457680f12f32782bb67f11be4f6ad85bf587c9e12ed011f94bdcc0cca52d2719d8c89d820e8181afdedbab2d33e5290686cc2a0287f797ae716cf9559a69459d3c4478f5446f01a173044609b5579f3f6e09dfc1ec28800b8ebd087a7df022ce327ff2d40d18c28a21741ad33a8002376188bf5f38d9dd415bae7ec9786c163adf1095d3fbdc8f34127ad888bd2d633f3fbd74f31c47a5fc79fdf7c86bac05976896b5f35f8d485499dbbab7c939265b2ec5bc38d8a39bdf6481ebbfedd4a1666aa469a6410860f2e0e0af399d4950e344a75379ee0add77668bae63e2ed6c8ec0b999620f6a"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r1, 0xd000943d, &(0x7f0000113680)={0x7, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}], 0x0, "bfe018be7dd971"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000114680)={0x0, ""/256, 0x0, 0x0}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f0000114880)={0x0}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f0000114a80)={0x0}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r1, 0xd000943e, &(0x7f0000114c80)={0x0, 0x0, "079be91e0619ad135a2db878543fe83bb4a3cfb584b464752ef3020b375b78dd8a818e8bf485c555ff3ee82714a0bd3aa7dbaec4fe8e62dee6f24ec9015f2f215754de313b6e10cf2f03afcd56e766e89e96a7eb695e3542aabee13b5f84f10a7e1b84282cc5a225764039c15c97ae51a6a50eeeb4c93d89f33fba219baba90947b5b22a1324f12d32890feb888f8e11c95d0ed8297083e24ccf9d234db20675c01c14dc1c55062fe9c9b7cdf3bf710b0db162dce592d8b0103d83fc1b1b94533870952208ab3f4895e074b822243f1294bf50082ec44a50e43d3bf2b4a0d4cbc00b63354b6b3da1d2362233188069d1a96215bda8a97f3731a3bc38b9da538b", 
"0868e5f49c02d63168e4da171c4e7ae1e5f7d5c08ce7fe3bc47c7059fa176b52ee0100eedeaa6ad63c2038b73d11f302f47b1b091ac07311b18cbacb8bbd90324bd2462a557b0ceb6bab739b0760629a6ddcf55e171dcf482fd041aa4502e41d9e7193ebf58b7a5134405f68c2b4b9c2a8cfb55224660b9a44c693ada176332569a0bed7e92ffcbfaafb0b826effb697f8c7a43cb5b4a3746e3513ecf38fd9f28d6d9ad18519914b18ab965c218ac8bc1c0ddefdf0abc1752de130c614dc100276b47708dc6009d7abbe10313ff84caf24ae62c70837e8dd4c77033dfe906d304161edde2f314c895a89b64a1d363b80a5452c456a9ec940e757e6e227fd5997c0b0b64ac3f6f839a7d83da370a65deafa16902b069d6f5f641e426ca1126376c5cdb052f1129023349d685b585124a5636953699d0e4ef0149e657055223c1833b565e16425f8f8812df1ed034bc3b5238806fe39de6640df23503fa3a34a81a0878a08b945d6479d6fb87d5ee9d299a153d60919e88e858990b83cf2434344b62d950b52624f32afe483cd557c25f724f7029dbddc7dc6c00a2a2ddac2dde7d266d7736cb86471ca569bfb8a8a37c55ceba2dd7b43fbcdaa9c9da0da3d0ff0e8e2af047431e582d2c590826cb9ebfeb73ff100fc1b1b2b7357671a79f4ab76c349a08ffa9247d466dc0615075b9e465ab2e4cf2affcf70d70c8f2c0a394b189dfa4c5c2f33559733a6c9f1efd860367d5389675b007af455c36d91dbe13c80f3a556d9c1e6122882895e550160626ba4891800372f4e82d8d571331b64b55af0cea6f43bd43082099507361ab738fac1dc349d1207e130b15c817c4d6352cc1d087ca300100fce25b93a6d8d1d842f79764717f0b7809ac40dc8b295c93bc09aa241b4b384bfcba6c148f54ee64637bfd137b0cbc57401bf69b00e8be6af70c96b225718c9c7e97134a5328d3d5d4bc4c7c5ef913c0094410c00e1fd6ea7d637f0d69869f6942f0d09055f5594ffb7c09e34d46a7fcb2949667231f89fdf549f7516db30f8dbc6267fd9d6192f8ba33d688655449e06b50979c8f9a4e14d2bb3f58369e9bb4ae71b6e68b1b6d3f567267bf83c08f0a98378646ddecd00dbd6eaf45b65c864d826b366f4ac8a2891602e556d5e10aac45fa085c65ce56e91a4cd9267483284b9a80c8e1f8e6957e45aa01d141093051417daef772f34734ff8aa7827f7940b418908effff223aa4e4ce3ae348341706f74ed6725dc7ff19349fd29c9392a81dbb4372c692104b96f9f29ed0bdfc20274d3690b329d188afe79ed25b29bd5c3bef62fb156b45a4f9bd2f8d06a5c357f0aa60cc27348d5d7befdd24ed07e3467839e03f98a5ecacf0749290ef9f080f87c1dc0c545f7a399d8b5e555aa8737bab27a0ecc34c8ae1d96044ca113d4b7adbf9cd051acb499f7175d0a15dbf20ea90fd10ca5745383dca7aa1656d2ffade488f484b80f43eec6c3de49d57f6d6dee737c4809e6d998b8ff9215dcf55781e38f4c7d1a816da0359f54aacfe468f0587e2a3ba69bcdb884b5eb56b262914701f490b49cba2ac8b5708155ec7a4344e89c48bea4e0670894aacc4b34df82f80f00974d7581dad3602b41f23987f56a5498f2fd4cbd263ce25d38d3152a01687804b389a85f8585b913606dd0e246b4f3857d6871424c555085c05521c2c5a3b7f36f52b02ea417ac1bb9da75746a6c454e2f60462f01665a0637741a66c136b50147abccac1f280690a2eb6bc3ad743fc323df08bb94dde9d198861c05764cacbb72e4bedbbfaf5d8e60706ef82c9e7e9630d075843af1f642db25d0046d2e0f3e51de0c8772c22bb2de9aedee3cf6411496fd891864d7ed7d847d7eaaeaec3917bce2bb3bdf79770d9e082cb825505ca9b6cb57f09a9d2bd9ddbc30242906f70e88a404a03faf06af69e0da1b4b4d91e910fe1c4d71febe1c96fd4d298334ed6c15ac2b82ae67e36b95c1a799a7e6d135b3fd41210b0364c15a96609798a02eb33877c4a9a295ce862aca3b5e5c8521201042a937c9dc6a14b47e82e0764bc90b5f6ddfa177c0a4492f387c1956d1e541fc5f51a2524f65f9ff670ff8b5dbdcba3c978523aef2faa404613d539042116a397414eeb0d897a5ba8ff64a86afe8132d2e12a6d41fdc63bebd39b8db390aa2ea243b72360f4a308731dacb2eaba825a2f243be5912b0f4152af64f39d4b2ad414a686b5242ca74dbec93dd6f34725fd55c4381bbfeb0a262d92805e2a59fb426891684e9fd107c593f1b006fd30ff97908780c8a92b589e4f2982333d216c91a2e1857241c56b0c6c0b3dac830d6866dc90e5f3b37c19d666c7dba9771a869b51e93051a6767768071b6ff1b8a761db3ce7eafc241a2b96b297bc888f01f59eedf129262274bbadddc2dcda65e7ab2d4adfdd4007e4dec69a0f8c5245ee711c9034e47a90c6b71956418fe1166d27d8e72e02c75b27aa7ec38ee04620973ceb78854ac60914f32771a
8c3ebc0847c5f3e2d399746aecf8138a723cff7935667b9f715ffb25a49db8df80ff5c932d7af49b52e08ae32b030bfca5453773f9d885e9214424b419e8682f90b786eed6fa580ef7879358d26c5f47af4152aff8963fdd17d75fa5cf49287fdf9e528e601299e465d633478b71edc838479fed693fc9ee8cb2fe80d490afb4f60552421067a726b58b1a4fce344ab4aff0d10a456c84f7e2733729b5451ae51768a7d189654c715a727f9274057fad2adb4f77342de80bbf194ded1a7f759c4ea4cf80fea7fef280524168d6b49ab8f8781a37478ab1f0fa6ed3be9be4eab7ca7cf912cbfcbdf585fcaf8d7700323cfdc0e5e29c857ad094ec2fda73d43d4dd087a591bdac467b32f7422e1779fdb278e9c4dd2858079d1a9c76de7fa076956be888c88494d45b40e8209f6ed4d635f8d7d041d42ada463ea42fefdd2a6a1af55b20142d356f0a0dd0371e581d4aa68321fa97580f6e6dfca7602bfd8dfc559236f13bc225c45183ecf73c088d066238bbd85d314b36d5579ce67d7812a2c79367f347120c320dace6c4ae75e9bba5042fda04ef52b2109108cde4c3aa21de911e24446a1bc8c7799d71e7cb181f43eaf90d4785afee63f5446f8f0ecd459bae99ae5f70ad35611749ab0277ff720ec08177e64919d6164aa46f97189d744e92c1a9f1beb554bf5036a6955b0151677262eda27c811b50faa0641604f872e83b184a5c004f54f980562dd293dffdb9165977c4ebab8c590e941e8de8f5562a7f1e473e90fd0504721a1bc4cbe9416f5b90476fd94e9822a2e9f48b100338ee340a3da2be639cba72b457a996c963591f586594e2e5bec3b76d618650cf37d6e53f29425c4cf2420770124d2311176f9291af26a13dc436b17f119bd9c16d8761944e06d726e789b5e17f1181830f480ccbdaec1f98f65351d19ade96de194c5d8a1eb0ff4861a9982560a05023f1d11fe3cd1cdaaaf8e09fd8434030f497ffe13d886d5a1f94543a298bfa00fb967de34d87189f3a9cc11316ca809326b4681412c3884df1408ba164f33dcc6351243e64ccef5e42ac3632875f5bdb76e668cf19337707c9f275c37932d81b83a59cc38b907e89e95b1313cd434852097b4b5ee5d9766be99d0cac4ffcf44283ad4ea39a568fc941eeff7345d9b6a9b5a00beb55a2136c97431e1f52776c8c434035ae19edd928393430a7a28eca7ed41633e0c4e922412961f0bafa9b79004d82b0c0a91d529a64ea0cdf7e4f91ee99a682581a5934bb7599041fd11e7f6faff7665e31d62330b26c252de5d15c3bec9d04a42ddf08228ea3032489eff2fbe1859c6d582276c06db65d51f8505e2d4d798307101e8f429ffc7ccc96144cce3756d0baba9abab990f4ebf020a7030da3c329e4d6959a25d14748a8d8c43f694eccb898ee17b3db0001d2936f48b7e8ee41e34263a8360a260925d4a36f2a74fab9d162dc61b926f52ce8b9320940a6601f533572ea012cbe4a735dbfcfbb48c4173bcdd5cc238f4200e8b11be61592b7d140104c8b14441d003e3519da34b8fa1c126ed9f71f9487e83ebe83687b64fe2b7c4ef975ed297d33b486d57521a97b8d2c8d5888d5cecd826f85d888836e577802ce08e7ae4f9f685c26fa421b7a2f0901129f8a338254983cee145c2f83a79d2b709cd49587cd6ddfd9848e68269f45bd5c09c1c2eee1525a70850f40c232b76adc979b7fc2b2b0bdce6346746f44bc0b15b737c0cd06d5adfbde32ee0d3bbeb76409c072e0a019aacc703128871cecd0f5bfc3b4321b24991a42b65b1e829b2a42b69f16fc3ca999f7f40f99dadabf61c7108082e10307a9fe17e4a01f0cfa9b0eeb4abbd8bb4a6612452d0a3c3cde02a0bd8a8a3cc8805e89283f62e2fc2d2c4e905e1a15d0cda24ca559b278e4db10a4dc3ce956f15433c69575b5ab9e63ec2ca150fee887fdabb8f05b84ff099bef08df78ffd685904781ca1ec72b25c7eb1267cff4be6cda2fbe82cdca061ba51b9c2597b1ef3d9fdba2ffb1ebd01b3bf557b5b3f7cac672b78e6580e873c4dd4c455e75fdcebbfe1482684f0d3c3dd22681462c02ec50d10c550cfe108e44e37813fae935e3acbe8f3c1ef5340e16c714266febf236670bf47e1a5ea19d04a11de93a2cf1244dc6478f9ecc6d15252190fdd5c8e543d51f49220935b23ca31f0983642d5f84b29afa9247533ce7545ec771e6a0c221dd80d4401364b3d776dcb555a8a5034ccda63b87b9493baeff66c3d83557af5b831128364d68a9b1afb4c51b6544391a17a26111955c41a1bac71e744c134610601628d735bfe9b06570c5c88d5a77f41470b6dec13102e51e5cd161f3e0a4bd20f8ee075086668c426194c1df8a5f36be548b4aa5975f82d85bb668283adccef07cd5f57d3469e6ed2da9ce4ed4487eb204313af7e0e4e79e402b45ba4b9f7a026f84eaf889f989ca6a607f0a1bfad06fbf57e4dd8f6d670646b180fba838d54b09e5897f3970641718dab4b
0a26d46cfa3fa08db4ae2493b3012b9becfb4be912da0cc1e06f30d5ae27d9abe97b7c6953090ce3249f7e5f40dfba01ee3ef123470695ed9bcc280a35f737ea9a76ec9f68a7cb2720171ee962e40be151b86a4667773abb9e2d0579acf5c839bea89cb801d2b6beda9862c4dd9f77cba7c4175c3b09956202f371b601b8e2ff4cef44a180a1540bcd5dff5aa007eb1134832323e7b82fc0bcc6d3fc320fd8eec7d1c6c3e7e811646b36862b96faf251ff99f2f0461a031a35810435ee90092a41190cd2324426a6a33df0849d95a4b1f4cbccf7c5ebc22967bdfed828e480bbdba21240b49f04f08bfb41e50b67dc74effda67ab9e7ed9b2a796880d21e3d718ad98767527ace43a6a0a8ca00c"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r1, 0xd000943d, &(0x7f0000115c80)={0xfffffffffffffff8, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}], 0x3f, "a9dc0978c38a1a"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r1, 0xd000943d, &(0x7f0000116c80)={0xff, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x1, "2edfd1a840a5af"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f0000117c80)={0xfffffffffffffffc, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r2}, {0x0, r3}, {r4, r5}, {r6, r7}, {r8, r9}, {r10}, {r11, r12}, {r13}, {0x0, r14}, {r15, r16}, {}, {r17}, {r18, r19}, {r20, r21}], 0x1f, "e49d06d39c6286"}) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f00000000c0)) (async) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000040)) (async) setsockopt$inet_mtu(0xffffffffffffffff, 0x0, 0xa, &(0x7f0000000080)=0x1, 0x4) accept$inet(r1, 0x0, &(0x7f0000000100)) 00:20:10 executing program 1: ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r0 = openat$udambuf(0xffffffffffffff9c, &(0x7f0000000000), 0x2) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000080)) 00:20:10 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) 
ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xe00, &(0x7f0000001440)) 00:20:10 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:10 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:10 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x4020940d, &(0x7f0000001440)) 00:20:10 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x2]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:10 executing program 3: ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r0 = openat$udambuf(0xffffffffffffff9c, &(0x7f0000000000), 0x2) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000080)) 00:20:10 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:10 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1100, &(0x7f0000001440)) 00:20:10 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x3]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:10 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x50540000, &(0x7f0000001440)) 00:20:10 executing program 3: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETGAMMA(r0, 0xc02064a4, &(0x7f00000009c0)={0x0, 0x0, 0x0, 0x0, 0x0}) r1 = syz_open_dev$dri(&(0x7f0000000600), 0xffffffffffff43d3, 0x1d3000) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000540)={&(0x7f0000000440)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 
&(0x7f0000000480)=[0x0, 0x0, 0x0], &(0x7f00000004c0)=[0x0], &(0x7f0000000500)=[0x0, 0x0, 0x0, 0x0], 0x9, 0x3, 0x1, 0x4}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000580)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000180)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x4, 0x5, 0x0, r4}) ioctl$DRM_IOCTL_MODE_GETENCODER(0xffffffffffffffff, 0xc01464a6, &(0x7f0000000000)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_GETPLANE(r1, 0xc02064b6, &(0x7f0000000680)={0x0, r3, r2, 0x0, 0x0, 0x7, &(0x7f0000000640)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]}) ioctl$DRM_IOCTL_MODE_GETGAMMA(r1, 0xc02064a4, &(0x7f0000000780)={r6, 0x4, &(0x7f00000006c0)=[0x42e6, 0x1, 0x0, 0xfff], &(0x7f0000000700)=[0x8, 0x7], &(0x7f0000000740)=[0x1ff, 0x5, 0x6d1c, 0x2, 0x81b2, 0x4, 0x8001, 0x3ff, 0x5]}) ioctl$DRM_IOCTL_MODE_GETGAMMA(r0, 0xc02064a4, &(0x7f0000000100)={r5, 0x6, &(0x7f0000000040)=[0x0, 0x1000, 0x80, 0x1, 0x9, 0x1ff], &(0x7f0000000080)=[0xf3, 0x5efa, 0x9, 0x3, 0xff, 0x9], &(0x7f00000000c0)=[0x401, 0x6, 0x3f]}) r7 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r7, 0x0, &(0x7f0000001440)) 00:20:10 executing program 0: r0 = accept4$inet(0xffffffffffffffff, &(0x7f0000000040)={0x2, 0x0, @multicast2}, &(0x7f0000000080)=0x10, 0x800) getsockname$inet(r0, &(0x7f00000000c0)={0x2, 0x0, @local}, &(0x7f0000000100)=0x10) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) 00:20:10 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x4]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:10 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1200, &(0x7f0000001440)) 00:20:10 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x51540000, &(0x7f0000001440)) 00:20:10 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) 00:20:10 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x5]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:10 executing program 0: 
r0 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETGAMMA(r0, 0xc02064a4, &(0x7f00000009c0)={0x0, 0x0, 0x0, 0x0, 0x0}) r1 = syz_open_dev$dri(&(0x7f0000000600), 0xffffffffffff43d3, 0x1d3000) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000540)={&(0x7f0000000440)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000480)=[0x0, 0x0, 0x0], &(0x7f00000004c0)=[0x0], &(0x7f0000000500)=[0x0, 0x0, 0x0, 0x0], 0x9, 0x3, 0x1, 0x4}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000580)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000180)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x4, 0x5, 0x0, r4}) ioctl$DRM_IOCTL_MODE_GETENCODER(0xffffffffffffffff, 0xc01464a6, &(0x7f0000000000)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_GETPLANE(r1, 0xc02064b6, &(0x7f0000000680)={0x0, r3, r2, 0x0, 0x0, 0x7, &(0x7f0000000640)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]}) ioctl$DRM_IOCTL_MODE_GETGAMMA(r1, 0xc02064a4, &(0x7f0000000780)={r6, 0x4, &(0x7f00000006c0)=[0x42e6, 0x1, 0x0, 0xfff], &(0x7f0000000700)=[0x8, 0x7], &(0x7f0000000740)=[0x1ff, 0x5, 0x6d1c, 0x2, 0x81b2, 0x4, 0x8001, 0x3ff, 0x5]}) ioctl$DRM_IOCTL_MODE_GETGAMMA(r0, 0xc02064a4, &(0x7f0000000100)={r5, 0x6, &(0x7f0000000040)=[0x0, 0x1000, 0x80, 0x1, 0x9, 0x1ff], &(0x7f0000000080)=[0xf3, 0x5efa, 0x9, 0x3, 0xff, 0x9], &(0x7f00000000c0)=[0x401, 0x6, 0x3f]}) r7 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r7, 0x0, &(0x7f0000001440)) syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) (async) ioctl$DRM_IOCTL_MODE_GETGAMMA(r0, 0xc02064a4, &(0x7f00000009c0)={0x0, 0x0, 0x0, 0x0, 0x0}) (async) syz_open_dev$dri(&(0x7f0000000600), 0xffffffffffff43d3, 0x1d3000) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000540)={&(0x7f0000000440)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000480)=[0x0, 0x0, 0x0], &(0x7f00000004c0)=[0x0], &(0x7f0000000500)=[0x0, 0x0, 0x0, 0x0], 0x9, 0x3, 0x1, 0x4}) (async) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000580)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000180)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x4, 0x5, 0x0, r4}) (async) ioctl$DRM_IOCTL_MODE_GETENCODER(0xffffffffffffffff, 0xc01464a6, &(0x7f0000000000)) (async) ioctl$DRM_IOCTL_MODE_GETPLANE(r1, 0xc02064b6, &(0x7f0000000680)={0x0, r3, r2, 0x0, 0x0, 0x7, &(0x7f0000000640)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]}) (async) ioctl$DRM_IOCTL_MODE_GETGAMMA(r1, 0xc02064a4, &(0x7f0000000780)={r6, 0x4, &(0x7f00000006c0)=[0x42e6, 0x1, 0x0, 0xfff], &(0x7f0000000700)=[0x8, 0x7], &(0x7f0000000740)=[0x1ff, 0x5, 0x6d1c, 0x2, 0x81b2, 0x4, 0x8001, 0x3ff, 0x5]}) (async) ioctl$DRM_IOCTL_MODE_GETGAMMA(r0, 0xc02064a4, &(0x7f0000000100)={r5, 0x6, &(0x7f0000000040)=[0x0, 0x1000, 0x80, 0x1, 0x9, 0x1ff], &(0x7f0000000080)=[0xf3, 0x5efa, 0x9, 0x3, 0xff, 0x9], &(0x7f00000000c0)=[0x401, 0x6, 0x3f]}) (async) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r7, 0x0, &(0x7f0000001440)) (async) 00:20:10 executing program 3: r0 = accept4$inet(0xffffffffffffffff, &(0x7f0000000040)={0x2, 0x0, @multicast2}, &(0x7f0000000080)=0x10, 0x800) getsockname$inet(r0, &(0x7f00000000c0)={0x2, 0x0, @local}, &(0x7f0000000100)=0x10) (async) getsockname$inet(r0, &(0x7f00000000c0)={0x2, 0x0, @local}, &(0x7f0000000100)=0x10) r1 = openat$nci(0xffffffffffffff9c, 
&(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) 00:20:10 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x6]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:10 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x52540000, &(0x7f0000001440)) 00:20:10 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1800, &(0x7f0000001440)) 00:20:10 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) accept$inet(0xffffffffffffffff, &(0x7f0000000000)={0x2, 0x0, @loopback}, &(0x7f0000000040)=0x10) 00:20:10 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = syz_open_dev$swradio(&(0x7f0000000000), 0x0, 0x2) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000040)) 00:20:10 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x7]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:10 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5c000000, &(0x7f0000001440)) 00:20:10 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = accept$inet(0xffffffffffffffff, &(0x7f0000000000)={0x2, 0x0, @multicast2}, &(0x7f0000000040)=0x10) accept$inet(r1, &(0x7f0000000080)={0x2, 0x0, @remote}, &(0x7f0000000100)=0x10) r2 = openat$nci(0xffffffffffffff9c, &(0x7f00000000c0), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f0000000140)) 00:20:10 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) accept$inet(0xffffffffffffffff, &(0x7f0000000000)={0x2, 0x0, @loopback}, &(0x7f0000000040)=0x10) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) accept$inet(0xffffffffffffffff, &(0x7f0000000000)={0x2, 0x0, @loopback}, &(0x7f0000000040)=0x10) (async) 00:20:11 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 
&(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x8]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:11 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1800, &(0x7f0000001440)) 00:20:11 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) syz_open_dev$swradio(&(0x7f0000000000), 0x0, 0x2) (async) r1 = syz_open_dev$swradio(&(0x7f0000000000), 0x0, 0x2) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000040)) 00:20:11 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5c000000, &(0x7f0000001440)) 00:20:11 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x60540000, &(0x7f0000001440)) 00:20:11 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x9]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:11 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) r1 = accept$inet(0xffffffffffffffff, &(0x7f0000000000)={0x2, 0x0, @multicast2}, &(0x7f0000000040)=0x10) accept$inet(r1, &(0x7f0000000080)={0x2, 0x0, @remote}, &(0x7f0000000100)=0x10) (async, rerun: 32) r2 = openat$nci(0xffffffffffffff9c, &(0x7f00000000c0), 0x2, 0x0) (rerun: 32) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f0000000140)) 00:20:11 executing program 1: syz_open_pts(0xffffffffffffffff, 0x501880) ioctl$SECCOMP_IOCTL_NOTIF_RECV(0xffffffffffffffff, 0xc0502100, &(0x7f0000000040)={0x0, 0x0}) syz_open_procfs$namespace(r0, &(0x7f00000000c0)='ns/user\x00') r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(0xffffffffffffffff, 0xc0502100, &(0x7f0000000100)={0x0, 0x0}) syz_open_procfs$namespace(r2, &(0x7f0000000180)='ns/uts\x00') ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) 00:20:11 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1b54, &(0x7f0000001440)) 00:20:11 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, 
@unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0xa]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:11 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:11 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x66631840, &(0x7f0000001440)) 00:20:11 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0xb]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:11 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x2154, &(0x7f0000001440)) 00:20:11 executing program 3: syz_open_pts(0xffffffffffffffff, 0x501880) (async) ioctl$SECCOMP_IOCTL_NOTIF_RECV(0xffffffffffffffff, 0xc0502100, &(0x7f0000000040)={0x0, 0x0}) syz_open_procfs$namespace(r0, &(0x7f00000000c0)='ns/user\x00') (async, rerun: 64) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (rerun: 64) ioctl$SECCOMP_IOCTL_NOTIF_RECV(0xffffffffffffffff, 0xc0502100, &(0x7f0000000100)={0x0, 0x0}) syz_open_procfs$namespace(r2, &(0x7f0000000180)='ns/uts\x00') (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) (async, rerun: 64) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) (rerun: 64) 00:20:11 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0xc]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:11 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:11 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x775804c0, &(0x7f0000001440)) 00:20:11 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x2500, &(0x7f0000001440)) 00:20:11 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 
0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0xd]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:11 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) syz_genetlink_get_family_id$ethtool(&(0x7f0000000240), r1) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000380)) r2 = openat$nci(0xffffffffffffff9c, &(0x7f0000000080), 0x2, 0x0) openat$nci(0xffffffffffffff9c, &(0x7f0000000440), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f0000000400)) r3 = accept$phonet_pipe(0xffffffffffffffff, &(0x7f0000000040), &(0x7f00000000c0)=0x10) getsockopt$PNPIPE_ENCAP(r3, 0x113, 0x1, &(0x7f0000000100), &(0x7f0000000140)=0x4) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000180)) syz_genetlink_get_family_id$ethtool(&(0x7f00000003c0), r1) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:11 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0xe]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:11 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x785804c0, &(0x7f0000001440)) 00:20:11 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x3389, &(0x7f0000001440)) 00:20:11 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x785804c0, &(0x7f0000001440)) 00:20:11 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x3389, &(0x7f0000001440)) 00:20:11 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x10]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:11 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x80086301, &(0x7f0000001440)) 00:20:11 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 
&(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x11]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:11 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5054, &(0x7f0000001440)) 00:20:11 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) syz_genetlink_get_family_id$ethtool(&(0x7f0000000240), r1) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000380)) r2 = openat$nci(0xffffffffffffff9c, &(0x7f0000000080), 0x2, 0x0) openat$nci(0xffffffffffffff9c, &(0x7f0000000440), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f0000000400)) (async) r3 = accept$phonet_pipe(0xffffffffffffffff, &(0x7f0000000040), &(0x7f00000000c0)=0x10) getsockopt$PNPIPE_ENCAP(r3, 0x113, 0x1, &(0x7f0000000100), &(0x7f0000000140)=0x4) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000180)) (async) syz_genetlink_get_family_id$ethtool(&(0x7f00000003c0), r1) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:11 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x12]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:12 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x80086301, &(0x7f0000001440)) 00:20:12 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x80086601, &(0x7f0000001440)) 00:20:12 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x18]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) [ 1212.108811][ T2049] workqueue: Failed to create a rescuer kthread for wq "nfc3_nci_cmd_wq": -EINTR 00:20:12 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5154, &(0x7f0000001440)) 00:20:12 executing program 0: r0 = accept4$inet(0xffffffffffffffff, &(0x7f0000000000)={0x2, 0x0, 
@dev}, &(0x7f0000000040)=0x10, 0x80800) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f00000000c0)={'filter\x00', 0x7, 0x4, 0x3f0, 0x0, 0x218, 0x218, 0x308, 0x308, 0x308, 0x4, &(0x7f0000000080), {[{{@uncond, 0xc0, 0x110}, @mangle={0x50, 'mangle\x00', 0x0, {@mac=@broadcast, @empty, @initdev={0xac, 0x1e, 0x1, 0x0}, @multicast2, 0xf}}}, {{@uncond, 0xc0, 0x108}, @unspec=@LED={0x48, 'LED\x00', 0x0, {'syz1\x00', 0x0, 0x1, {0x6}}}}, {{@uncond, 0xc0, 0xf0}, @unspec=@CONNMARK={0x30, 'CONNMARK\x00', 0x1, {0x7, 0xfffffbff, 0x0, 0x1}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x440) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000500)) [ 1212.155541][ T2050] workqueue: Failed to create a rescuer kthread for wq "nfc6_nci_tx_wq": -EINTR 00:20:12 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:12 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000100), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) r1 = socket$l2tp(0x2, 0x2, 0x73) setsockopt$IPT_SO_SET_ADD_COUNTERS(r1, 0x0, 0x41, 0x0, 0x58) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000080)) 00:20:12 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x801c581f, &(0x7f0000001440)) 00:20:12 executing program 2: r0 = accept4$inet(0xffffffffffffffff, &(0x7f0000000000)={0x2, 0x0, @dev}, &(0x7f0000000040)=0x10, 0x80800) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f00000000c0)={'filter\x00', 0x7, 0x4, 0x3f0, 0x0, 0x218, 0x218, 0x308, 0x308, 0x308, 0x4, &(0x7f0000000080), {[{{@uncond, 0xc0, 0x110}, @mangle={0x50, 'mangle\x00', 0x0, {@mac=@broadcast, @empty, @initdev={0xac, 0x1e, 0x1, 0x0}, @multicast2, 0xf}}}, {{@uncond, 0xc0, 0x108}, @unspec=@LED={0x48, 'LED\x00', 0x0, {'syz1\x00', 0x0, 0x1, {0x6}}}}, {{@uncond, 0xc0, 0xf0}, @unspec=@CONNMARK={0x30, 'CONNMARK\x00', 0x1, {0x7, 0xfffffbff, 0x0, 0x1}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x440) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000500)) 00:20:12 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5254, &(0x7f0000001440)) 00:20:12 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, 
{{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x48]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:12 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000080), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GET_LEASE(r1, 0xc01064c8, &(0x7f0000000480)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000180)={&(0x7f0000000000)=[0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x2, 0x8, 0x8, 0x8}) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000040)) 00:20:12 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x4c]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:12 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = socket$inet_sctp(0x2, 0x1, 0x84) bind$inet(r1, &(0x7f0000000000)={0x2, 0x5, @empty}, 0x10) setsockopt$inet_mtu(r1, 0x0, 0xa, &(0x7f0000000040), 0x4) 00:20:12 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:12 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc0045878, &(0x7f0000001440)) 00:20:12 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x541b, &(0x7f0000001440)) 00:20:12 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x6c]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 
'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:12 executing program 3: openat$nci(0xffffffffffffff9c, &(0x7f0000000100), 0x2, 0x0) (async) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000100), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) r1 = socket$l2tp(0x2, 0x2, 0x73) setsockopt$IPT_SO_SET_ADD_COUNTERS(r1, 0x0, 0x41, 0x0, 0x58) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000080)) (async) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000080)) 00:20:12 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000080), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) r1 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GET_LEASE(r1, 0xc01064c8, &(0x7f0000000480)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000180)={&(0x7f0000000000)=[0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x2, 0x8, 0x8, 0x8}) (async) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000040)) 00:20:12 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) r1 = socket$inet_sctp(0x2, 0x1, 0x84) bind$inet(r1, &(0x7f0000000000)={0x2, 0x5, @empty}, 0x10) (async) setsockopt$inet_mtu(r1, 0x0, 0xa, &(0x7f0000000040), 0x4) 00:20:12 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc0045878, &(0x7f0000001440)) 00:20:12 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x74]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:12 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) socket$inet6_udplite(0xa, 0x2, 0x88) 00:20:12 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:12 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5421, &(0x7f0000001440)) 00:20:12 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$udambuf(0xffffffffffffff9c, &(0x7f0000000000), 0x2) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r2 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x801c581f, 0x0) r3 = memfd_create(&(0x7f0000000240)='/dev/udmabuf\x00', 0x4) 
ioctl$UDMABUF_CREATE_LIST(r2, 0x40087543, &(0x7f0000000280)={0x1, 0x3, [{}, {r3, 0x0, 0xfffffffffffff000}, {0xffffffffffffffff, 0x0, 0x100000000, 0x1000000}]}) 00:20:12 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x7a]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:12 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc0189436, &(0x7f0000001440)) 00:20:12 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) 00:20:12 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5450, &(0x7f0000001440)) 00:20:12 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) (async) socket$inet6_udplite(0xa, 0x2, 0x88) 00:20:13 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x7a]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:13 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0xe8]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:13 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc020660b, &(0x7f0000001440)) 00:20:13 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = syz_open_dev$midi(&(0x7f0000000180), 0x7f, 0x28200) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000200)) 
setsockopt$inet_msfilter(0xffffffffffffffff, 0x0, 0x29, &(0x7f0000000240)={@loopback, @loopback, 0x0, 0x4, [@private=0xa010101, @empty, @rand_addr=0x64010100, @loopback]}, 0x20) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f00000001c0)) r2 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) ioctl$LOOP_CTL_ADD(r2, 0x40305839, 0xa) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0], 0x6, 0x6, 0x5, 0x1}) 00:20:13 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r1 = openat$udambuf(0xffffffffffffff9c, &(0x7f0000000000), 0x2) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) r2 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x801c581f, 0x0) (async, rerun: 64) r3 = memfd_create(&(0x7f0000000240)='/dev/udmabuf\x00', 0x4) (rerun: 64) ioctl$UDMABUF_CREATE_LIST(r2, 0x40087543, &(0x7f0000000280)={0x1, 0x3, [{}, {r3, 0x0, 0xfffffffffffff000}, {0xffffffffffffffff, 0x0, 0x100000000, 0x1000000}]}) 00:20:13 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$nci(0xffffffffffffff9c, &(0x7f00000001c0), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000200)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r2 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0), &(0x7f0000000100)=[0x0], 0x3, 0x5, 0x0, 0x1}) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000180)) 00:20:13 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:13 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5451, &(0x7f0000001440)) 00:20:13 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc9480440, &(0x7f0000001440)) 00:20:13 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) r1 = syz_open_dev$midi(&(0x7f0000000180), 0x7f, 0x28200) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000200)) setsockopt$inet_msfilter(0xffffffffffffffff, 0x0, 0x29, &(0x7f0000000240)={@loopback, @loopback, 0x0, 0x4, [@private=0xa010101, @empty, @rand_addr=0x64010100, @loopback]}, 0x20) (async) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f00000001c0)) (async) r2 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) ioctl$LOOP_CTL_ADD(r2, 0x40305839, 0xa) (async) 
ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0], 0x6, 0x6, 0x5, 0x1}) 00:20:13 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x2]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:13 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async, rerun: 64) r1 = openat$nci(0xffffffffffffff9c, &(0x7f00000001c0), 0x2, 0x0) (rerun: 64) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000200)) (async, rerun: 64) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (rerun: 64) r2 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0), &(0x7f0000000100)=[0x0], 0x3, 0x5, 0x0, 0x1}) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000180)) 00:20:13 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) 00:20:13 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5452, &(0x7f0000001440)) 00:20:13 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xca540440, &(0x7f0000001440)) 00:20:13 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x3]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:13 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:13 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 
0x0, 0x0, 0x4]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:13 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) [ 1212.207129][ T1846] workqueue: Failed to create a rescuer kthread for wq "nfc2_nci_cmd_wq": -EINTR [ 1213.402099][ T2303] workqueue: Failed to create a rescuer kthread for wq "nfc5_nci_cmd_wq": -EINTR 00:20:13 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x5]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:13 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5460, &(0x7f0000001440)) 00:20:13 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xdd480440, &(0x7f0000001440)) 00:20:13 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) r1 = accept4$inet(0xffffffffffffffff, &(0x7f0000000040)={0x2, 0x0, @initdev}, &(0x7f0000000080)=0x10, 0x40800) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f00000000c0)) r2 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f00000001c0)) ioctl$LOOP_CTL_ADD(r2, 0x40305829, 0xa) ioctl$IOMMU_VFIO_IOAS$GET(0xffffffffffffffff, 0x3b88, &(0x7f0000000100)={0xc, 0x0}) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(r2, 0x3ba0, &(0x7f0000000140)={0x48, 0x5, r3, 0x0, 0xffffffffffffffff, 0x1}) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:13 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:13 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:13 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:13 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5c00, &(0x7f0000001440)) 00:20:13 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, 
{{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x6]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:13 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) r1 = accept4$inet(0xffffffffffffffff, &(0x7f0000000040)={0x2, 0x0, @initdev}, &(0x7f0000000080)=0x10, 0x40800) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f00000000c0)) r2 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f00000001c0)) (async, rerun: 32) ioctl$LOOP_CTL_ADD(r2, 0x40305829, 0xa) (async, rerun: 32) ioctl$IOMMU_VFIO_IOAS$GET(0xffffffffffffffff, 0x3b88, &(0x7f0000000100)={0xc, 0x0}) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(r2, 0x3ba0, &(0x7f0000000140)={0x48, 0x5, r3, 0x0, 0xffffffffffffffff, 0x1}) (async, rerun: 32) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (rerun: 32) [ 1213.414344][ T2306] workqueue: Failed to create a rescuer kthread for wq "nfc6_nci_cmd_wq": -EINTR [ 1213.680589][ T2396] xt_check_table_hooks: 39 callbacks suppressed [ 1213.680605][ T2396] x_tables: duplicate underflow at hook 1 00:20:13 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:13 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:13 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000040)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:13 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfffffdfd, &(0x7f0000001440)) 00:20:13 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x6]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:13 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x7]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:13 executing program 1: r0 = 
openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x5460, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) [ 1213.797171][ T2418] x_tables: duplicate underflow at hook 1 00:20:13 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) [ 1213.837345][ T2425] x_tables: duplicate underflow at hook 1 00:20:13 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xffffff7f, &(0x7f0000001440)) 00:20:13 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x6054, &(0x7f0000001440)) 00:20:13 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r1, 0xc05064a7, &(0x7f0000000300)={&(0x7f0000000040)=[0x0, 0x0], &(0x7f0000000080)=[{}, {}, {}, {}, {}, {}, {}], &(0x7f0000000280)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000002c0)=[0x0], 0x7, 0x6, 0x2}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r1, 0xc02064b9, &(0x7f0000000400)={&(0x7f0000000380)=[0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x1}) ioctl$DRM_IOCTL_MODE_GET_LEASE(r1, 0xc01064c8, &(0x7f0000000480)={0x4, 0x0, &(0x7f0000000440)=[0x0, 0x0, 0x0, 0x0]}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000000500)={&(0x7f00000004c0)=[r2, r3, 0x0, r4], 0x4}) ioctl$LOOP_CTL_ADD(r1, 0x40305828, 0xa) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:13 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x8]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:13 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x541b, 0x0) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, 0xfffffffffffffffe, &(0x7f0000000080)) [ 1213.949679][ T2453] x_tables: duplicate underflow at hook 1 00:20:13 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async, rerun: 64) r1 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) (rerun: 64) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r1, 0xc05064a7, &(0x7f0000000300)={&(0x7f0000000040)=[0x0, 0x0], &(0x7f0000000080)=[{}, {}, {}, {}, {}, {}, {}], &(0x7f0000000280)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000002c0)=[0x0], 0x7, 0x6, 0x2}) (async) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r1, 0xc02064b9, 
&(0x7f0000000400)={&(0x7f0000000380)=[0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x1}) (async) ioctl$DRM_IOCTL_MODE_GET_LEASE(r1, 0xc01064c8, &(0x7f0000000480)={0x4, 0x0, &(0x7f0000000440)=[0x0, 0x0, 0x0, 0x0]}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000000500)={&(0x7f00000004c0)=[r2, r3, 0x0, r4], 0x4}) ioctl$LOOP_CTL_ADD(r1, 0x40305828, 0xa) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:13 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x5460, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x5460, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000000)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) 00:20:13 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x9]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:14 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x8933, &(0x7f0000001440)) [ 1214.030730][ T2467] x_tables: duplicate underflow at hook 1 00:20:14 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) (async, rerun: 64) ioctl$IOMMU_VFIO_IOAS$GET(0xffffffffffffffff, 0x3b88, &(0x7f0000000b40)={0xc, 0x0}) (rerun: 64) ioctl$IOMMU_TEST_OP_DESTROY_ACCESS_PAGES(0xffffffffffffffff, 0x3ba0, &(0x7f0000000c00)={0x48, 0x6, r1}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(r0, 0x3ba0, &(0x7f0000000000)={0x48, 0x7, 0xffffffffffffffff, 0x0, 0x10001, 0x0, 0x7, 0x4614a, 0x41b66}) ioctl$IOMMU_TEST_OP_DESTROY_ACCESS_PAGES(r0, 0x3ba0, &(0x7f0000000080)={0x48, 0x6, r1, 0x0, r2}) 00:20:14 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x400448c9, 0x0) 00:20:14 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0xa]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:14 executing program 1: r0 = openat$nci(0xffffffffffffff9c, 
&(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x541b, 0x0) (async) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, 0xfffffffffffffffe, &(0x7f0000000080)) [ 1214.042711][ T2458] workqueue: Failed to create a rescuer kthread for wq "nfc5_nci_cmd_wq": -EINTR [ 1214.119197][ T2486] x_tables: duplicate underflow at hook 1 00:20:14 executing program 0: r0 = seccomp$SECCOMP_SET_MODE_FILTER_LISTENER(0x1, 0x8, &(0x7f0000000040)={0x1, &(0x7f0000000000)=[{0x5, 0xea, 0xa4, 0x7}]}) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000080)) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000100)={0x0}) r3 = socket$l2tp(0x2, 0x2, 0x73) setsockopt$IPT_SO_SET_ADD_COUNTERS(r3, 0x0, 0x41, &(0x7f00000004c0)=ANY=[@ANYBLOB="726177000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000f1a17dd220d430ff2404ce14e10000"], 0x48) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000180)={r2, 0x1, r3}) 00:20:14 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1000000, &(0x7f0000001440)) 00:20:14 executing program 3: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000080), &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x8, 0x8}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0], 0x4, 0x5, 0x6, 0x1}) 00:20:14 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0xb]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:14 executing program 1: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(0xffffffffffffffff, 0x3b88, &(0x7f0000000000)={0xc}) [ 1214.243989][ T2508] x_tables: duplicate underflow at hook 1 00:20:14 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = openat$zero(0xffffffffffffff9c, &(0x7f0000001280), 0x0, 0x0) read$midi(r1, &(0x7f0000000000)=""/177, 0xb1) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000180)={&(0x7f0000000080)=[0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0], &(0x7f0000000100)=[0x0, 
0x0, 0x0], &(0x7f0000000140)=[0x0, 0x0], 0x3, 0x2, 0x3, 0x2}) 00:20:14 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0xc]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:14 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x400448dd, 0x0) 00:20:14 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) 00:20:14 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1630880, &(0x7f0000001440)) 00:20:14 executing program 2: r0 = seccomp$SECCOMP_SET_MODE_FILTER_LISTENER(0x1, 0x8, &(0x7f0000000040)={0x1, &(0x7f0000000000)=[{0x5, 0xea, 0xa4, 0x7}]}) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000080)) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000100)={0x0}) r3 = socket$l2tp(0x2, 0x2, 0x73) setsockopt$IPT_SO_SET_ADD_COUNTERS(r3, 0x0, 0x41, &(0x7f00000004c0)=ANY=[@ANYBLOB="726177000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000f1a17dd220d430ff2404ce14e10000"], 0x48) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000180)={r2, 0x1, r3}) seccomp$SECCOMP_SET_MODE_FILTER_LISTENER(0x1, 0x8, &(0x7f0000000040)={0x1, &(0x7f0000000000)=[{0x5, 0xea, 0xa4, 0x7}]}) (async) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000080)) (async) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) (async) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000100)) (async) socket$l2tp(0x2, 0x2, 0x73) (async) setsockopt$IPT_SO_SET_ADD_COUNTERS(r3, 0x0, 0x41, &(0x7f00000004c0)=ANY=[@ANYBLOB="726177000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000f1a17dd220d430ff2404ce14e10000"], 0x48) (async) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000180)={r2, 0x1, r3}) (async) 00:20:14 executing program 4: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) (async) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(0xffffffffffffffff, 0x3b88, &(0x7f0000000000)={0xc}) [ 1214.381976][ T2524] x_tables: duplicate underflow at hook 1 00:20:14 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000040)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = openat$zero(0xffffffffffffff9c, &(0x7f0000001280), 0x0, 0x0) read$midi(r1, &(0x7f0000000000)=""/177, 0xb1) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000180)={&(0x7f0000000080)=[0x0, 0x0, 0x0], 
&(0x7f00000000c0)=[0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0], &(0x7f0000000140)=[0x0, 0x0], 0x3, 0x2, 0x3, 0x2}) 00:20:14 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x400454ca, 0x0) 00:20:14 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0xd]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:14 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1660880, &(0x7f0000001440)) [ 1214.481315][ T2543] workqueue: Failed to create a rescuer kthread for wq "nfc3_nci_cmd_wq": -EINTR [ 1214.502886][ T2551] x_tables: duplicate underflow at hook 1 00:20:14 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0xa002, 0x0) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x40086602, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000080)={0x48, 0x8, r1, 0x0, 0xa, 0x3d4f91, 0x27, &(0x7f0000000040)="ae1cb62ae2ce3441697a4633cdc0fbfb2550ddd3e3524939acfaa8f85868a17b511ebda24f3dda", 0x10004}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:14 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x40049409, 0x0) 00:20:14 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000000040)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x9, 0x800, 0x0, 0xffffffffffffffff}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000180)={&(0x7f0000000080)=[0x0], &(0x7f00000000c0)=[0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], 0x1, 0x2, 0x4, 0x4}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f00000002c0)={&(0x7f00000001c0)=[0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0], &(0x7f0000000240)=[0x0, 0x0], &(0x7f0000000280)=[0x0, 0x0], 0x3, 0x2, 0x2, 0x2}) 00:20:14 executing program 1: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000080), &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x8, 0x8}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0], 0x4, 0x5, 0x6, 0x1}) 00:20:14 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 
0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0xe]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:14 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000400)={&(0x7f0000000300)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000340)=[0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x6, 0x2, 0x2, 0x6}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r1, 0xc02064b9, &(0x7f0000001200)={0x0, 0x0, 0x0, r2}) r3 = syz_genetlink_get_family_id$nfc(&(0x7f00000001c0), 0xffffffffffffffff) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000200)=0x0) sendmsg$NFC_CMD_DEV_UP(0xffffffffffffffff, &(0x7f00000002c0)={&(0x7f0000000180)={0x10, 0x0, 0x0, 0x200}, 0xc, &(0x7f0000000280)={&(0x7f0000000240)=ANY=[@ANYBLOB="05000100", @ANYRES16=r3, @ANYBLOB="020029bd7000fcdbdf250200000008000100", @ANYRES32=r4, @ANYBLOB], 0x1c}, 0x1, 0x0, 0x0, 0x4c040}, 0x8044) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0], 0x4, 0x9, 0xa, 0x1}) 00:20:14 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0xa002, 0x0) (async) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x40086602, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000080)={0x48, 0x8, r1, 0x0, 0xa, 0x3d4f91, 0x27, &(0x7f0000000040)="ae1cb62ae2ce3441697a4633cdc0fbfb2550ddd3e3524939acfaa8f85868a17b511ebda24f3dda", 0x10004}) (async, rerun: 64) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) (rerun: 64) [ 1214.687185][ T2570] x_tables: duplicate underflow at hook 1 00:20:14 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x2000000, &(0x7f0000001440)) 00:20:14 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x40086602, 0x0) 00:20:14 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x10]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:14 executing program 0: r0 = syz_open_dev$dri(&(0x7f0000000000), 0xfffffffffffffffc, 0x0) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(r0, 0xc00864bf, 
&(0x7f0000000040)={0x0, 0x1}) 00:20:14 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000000040)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x9, 0x800}) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000000040)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x9, 0x800, 0x0, 0xffffffffffffffff}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000180)={&(0x7f0000000080)=[0x0], &(0x7f00000000c0)=[0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], 0x1, 0x2, 0x4, 0x4}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f00000002c0)={&(0x7f00000001c0)=[0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0], &(0x7f0000000240)=[0x0, 0x0], &(0x7f0000000280)=[0x0, 0x0], 0x3, 0x2, 0x2, 0x2}) 00:20:14 executing program 2: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x604041, 0x0) r0 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000000000), 0x10802, 0x0) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000080)) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f00000000c0)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:14 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) (async) r1 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000400)={&(0x7f0000000300)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000340)=[0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x6, 0x2, 0x2, 0x6}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r1, 0xc02064b9, &(0x7f0000001200)={0x0, 0x0, 0x0, r2}) r3 = syz_genetlink_get_family_id$nfc(&(0x7f00000001c0), 0xffffffffffffffff) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000200)=0x0) sendmsg$NFC_CMD_DEV_UP(0xffffffffffffffff, &(0x7f00000002c0)={&(0x7f0000000180)={0x10, 0x0, 0x0, 0x200}, 0xc, &(0x7f0000000280)={&(0x7f0000000240)=ANY=[@ANYBLOB="05000100", @ANYRES16=r3, @ANYBLOB="020029bd7000fcdbdf250200000008000100", @ANYRES32=r4, @ANYBLOB], 0x1c}, 0x1, 0x0, 0x0, 0x4c040}, 0x8044) (async) sendmsg$NFC_CMD_DEV_UP(0xffffffffffffffff, &(0x7f00000002c0)={&(0x7f0000000180)={0x10, 0x0, 0x0, 0x200}, 0xc, &(0x7f0000000280)={&(0x7f0000000240)=ANY=[@ANYBLOB="05000100", @ANYRES16=r3, @ANYBLOB="020029bd7000fcdbdf250200000008000100", @ANYRES32=r4, @ANYBLOB], 0x1c}, 0x1, 0x0, 0x0, 0x4c040}, 0x8044) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000100)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0], 0x4, 0x9, 0xa, 0x1}) 00:20:14 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, 
{{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x11]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:14 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x2660840, &(0x7f0000001440)) 00:20:14 executing program 0: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x40186366, 0x0) 00:20:14 executing program 4: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x604041, 0x0) r0 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000000000), 0x10802, 0x0) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000080)) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f00000000c0)={0xc}) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:14 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = accept4$phonet_pipe(0xffffffffffffffff, &(0x7f0000000000), &(0x7f0000000040)=0x10, 0x0) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f0000000080), &(0x7f00000000c0)=0x4) 00:20:14 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0xfffffffffffffffc, 0x0) (async) r0 = syz_open_dev$dri(&(0x7f0000000000), 0xfffffffffffffffc, 0x0) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(r0, 0xc00864bf, &(0x7f0000000040)={0x0, 0x1}) 00:20:14 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0x2, 0x0) 00:20:14 executing program 0: r0 = socket$phonet_pipe(0x23, 0x5, 0x2) connect$phonet_pipe(r0, &(0x7f0000000100)={0x23, 0x2, 0x6}, 0x10) connect$phonet_pipe(r0, &(0x7f0000000000)={0x23, 0x0, 0x47, 0x4}, 0x10) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000140)) r1 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000340), 0x6aa80, 0x0) getsockopt$PNPIPE_HANDLE(r1, 0x113, 0x3, &(0x7f0000000040), &(0x7f0000000080)=0x4) setsockopt$PNPIPE_INITSTATE(r1, 0x113, 0x4, &(0x7f00000000c0), 0x4) 00:20:14 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x12]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:14 executing program 4: ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000000040)={&(0x7f0000000000)=[0x0], 0x1, 0x80000, 0x0, 0xffffffffffffffff}) ioctl$UDMABUF_CREATE_LIST(0xffffffffffffffff, 0x40087543, &(0x7f0000000080)={0x1, 0x2, [{r0, 0x0, 0x1000, 0x1000}, {0xffffffffffffffff, 0x0, 0x10000, 0x2000}]}) 00:20:14 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x3000000, &(0x7f0000001440)) 00:20:15 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x111000, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:15 executing program 0: r0 = 
openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = accept4$phonet_pipe(0xffffffffffffffff, &(0x7f0000000000), &(0x7f0000000040)=0x10, 0x0) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f0000000080), &(0x7f00000000c0)=0x4) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) accept4$phonet_pipe(0xffffffffffffffff, &(0x7f0000000000), &(0x7f0000000040)=0x10, 0x0) (async) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f0000000080), &(0x7f00000000c0)=0x4) (async) 00:20:15 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x401c5820, 0x0) 00:20:15 executing program 3: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000080)={&(0x7f0000000040)=[0x0], 0x1, 0x1800}) 00:20:15 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x18]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:15 executing program 1: ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000000040)={&(0x7f0000000000)=[0x0], 0x1, 0x80000, 0x0, 0xffffffffffffffff}) ioctl$UDMABUF_CREATE_LIST(0xffffffffffffffff, 0x40087543, &(0x7f0000000080)={0x1, 0x2, [{r0, 0x0, 0x1000, 0x1000}, {0xffffffffffffffff, 0x0, 0x10000, 0x2000}]}) 00:20:15 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x4000000, &(0x7f0000001440)) 00:20:15 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x111000, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:15 executing program 4: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000080)={&(0x7f0000000040)=[0x0], 0x1, 0x1800}) 00:20:15 executing program 1: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x4020940d, 0x0) 00:20:15 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:15 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, 
&(0x7f0000000080)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x5, 0x800, 0x0, 0xffffffffffffffff}) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000100)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f00000000c0)) 00:20:15 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000140)={0x48, 0x8, r0, 0x0, 0x4, 0x26d55c, 0xd7, &(0x7f0000000040)="e5ceabb497c7deceabafb051e351a189f6636c7c2c6824dee7d6910aab5401cba3f24f17f3121b06902277caa47d24ecd5c4e29a48fb9c6c6f6c32e524f469826f40fcefba553d8283c69abd7c85ae5086785c3ab5da6fbfacdec8ba629038868a787d258ace1e78dd92c4ba73a1df3c03c1bc7917d5b534681383e59220b4b57eb553b771f34c34c055c278208316c1351e56f9021403a170dd224f5d88ecc0b8019b9b5450362da2631f336f3390872ab6a1f7d8393842fe5c16cdd2456963a3ae1d80c27c1cb81c6b563f1dc7bde9ccbcb4cd8c5dfb", 0x1}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:15 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x4000000, &(0x7f0000001440)) 00:20:15 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:15 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5000000, &(0x7f0000001440)) 00:20:15 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc}) (async) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000140)={0x48, 0x8, r0, 0x0, 0x4, 0x26d55c, 0xd7, &(0x7f0000000040)="e5ceabb497c7deceabafb051e351a189f6636c7c2c6824dee7d6910aab5401cba3f24f17f3121b06902277caa47d24ecd5c4e29a48fb9c6c6f6c32e524f469826f40fcefba553d8283c69abd7c85ae5086785c3ab5da6fbfacdec8ba629038868a787d258ace1e78dd92c4ba73a1df3c03c1bc7917d5b534681383e59220b4b57eb553b771f34c34c055c278208316c1351e56f9021403a170dd224f5d88ecc0b8019b9b5450362da2631f336f3390872ab6a1f7d8393842fe5c16cdd2456963a3ae1d80c27c1cb81c6b563f1dc7bde9ccbcb4cd8c5dfb", 0x1}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:15 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x48]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:15 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000000080)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x5, 0x800, 0x0, 0xffffffffffffffff}) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000100)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f00000000c0)) 00:20:15 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x80086301, 0x0) 00:20:15 executing program 1: r0 = 
openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x6000000, &(0x7f0000001440)) 00:20:15 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) 00:20:15 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:15 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x4c]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:15 executing program 0: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x80086601, 0x0) 00:20:15 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:15 executing program 3: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x7ff, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000100)={&(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x5, 0x80000}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f0000000080)={&(0x7f0000000040)=[0x0], 0x1}) 00:20:15 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:15 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x7000000, &(0x7f0000001440)) 00:20:15 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:15 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x801c581f, 0x0) 00:20:15 executing program 2: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x7ff, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000100)={&(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x5, 0x80000}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f0000000080)={&(0x7f0000000040)=[0x0], 0x1}) 00:20:15 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0x3, 0x0) 00:20:15 executing program 0: r0 = socket$phonet_pipe(0x23, 0x5, 0x2) connect$phonet_pipe(r0, &(0x7f0000000100)={0x23, 0x2, 0x6}, 0x10) connect$phonet_pipe(r0, 
&(0x7f0000000000)={0x23, 0x0, 0x47, 0x4}, 0x10) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000000140)) (async) r1 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000340), 0x6aa80, 0x0) getsockopt$PNPIPE_HANDLE(r1, 0x113, 0x3, &(0x7f0000000040), &(0x7f0000000080)=0x4) setsockopt$PNPIPE_INITSTATE(r1, 0x113, 0x4, &(0x7f00000000c0), 0x4) 00:20:15 executing program 1: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) 00:20:15 executing program 4: r0 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f00000000c0), 0x10400, 0x0) r1 = ioctl$LOOP_CTL_GET_FREE(r0, 0x4c82) ioctl$LOOP_CTL_ADD(r0, 0x4c80, r1) ioctl$LOOP_CTL_ADD(r0, 0x4c80, 0xa) r2 = accept4$phonet_pipe(r0, &(0x7f0000000100), &(0x7f0000000040)=0x10, 0x0) ioctl$sock_SIOCGIFINDEX_80211(r2, 0x8933, &(0x7f0000000080)={'wlan0\x00'}) r3 = gettid() syz_open_procfs$namespace(r3, 0x0) 00:20:15 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x6c]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:15 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x8000000, &(0x7f0000001440)) 00:20:15 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(r0, 0x3ba0, &(0x7f0000000040)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000140)={0x48, 0x8, r1, 0x0, 0x2, 0x1bce43, 0x5e, &(0x7f00000000c0)="ed73e983bb5e2b435f034af53fbe1a0366631891e860b36cfee6e40b3d493c711c3460fcbec96b10ed85c67997b92d82379bbcca098c8f9540ef18e423db69ebe01d30be518cd5666d658be59d1b7610e2f2cad30fa7c4985d187f2ab707", 0x10000}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:15 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0xc0045878, 0x0) 00:20:15 executing program 0: syz_open_dev$dri(&(0x7f0000000000), 0x4, 0x0) 00:20:15 executing program 1: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x2, 0x0) 00:20:16 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x74]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:16 executing program 4: socket$inet6_sctp(0xa, 0x5, 0x84) syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) 00:20:16 executing program 2: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) ioctl$USBDEVFS_DISCONNECT_CLAIM(r0, 
0x8108551b, &(0x7f0000000040)={0x5, 0x2, "5e347533cfb5692f0f9c404ac12dbddf87a3a8a4f7f9091c97e2d5b6c1d9166ad2b2ebcd80a3a9c88737c61d2b63885be201edff5efe37da53a8ef4db7295f556540109c01b86001ed2e51f458d75475c1ca4f061b55345a3c2ef0a8651452773917fa14e19ced53fca503c50624ff4a1501199a8f12b9490d58cd35e787396380ce029e8d81d6732ffe5e1f85f2361a9d354de8b99c362e15fae56dab68fbc9f1933a5fca737da7849e01175afc7f0d7674feac3a840875bcb944da9171d1a68dd30c3b6d218b20f61d0046feab0f3700ab9fdfd95f25168611c1f9377020307457ee58a585923aa66f8f78309581df5dbc93ac35ff64d7b34560f83d05f47b"}) 00:20:16 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x9000000, &(0x7f0000001440)) 00:20:16 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0xc0045878, 0x0) 00:20:16 executing program 1: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) (async) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(r0, 0x3ba0, &(0x7f0000000040)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000140)={0x48, 0x8, r1, 0x0, 0x2, 0x1bce43, 0x5e, &(0x7f00000000c0)="ed73e983bb5e2b435f034af53fbe1a0366631891e860b36cfee6e40b3d493c711c3460fcbec96b10ed85c67997b92d82379bbcca098c8f9540ef18e423db69ebe01d30be518cd5666d658be59d1b7610e2f2cad30fa7c4985d187f2ab707", 0x10000}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:16 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x74]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:16 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x7a]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:16 executing program 0: socket$inet6_sctp(0xa, 0x5, 0x84) (async) syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) 00:20:16 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0x5, 0x0) 00:20:16 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x9940440, &(0x7f0000001440)) 00:20:16 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(r0, 0x3ba0, &(0x7f0000000040)={0x48, 0x7, 0xffffffffffffffff, 0x0, 0x1, 0x0, 0x7, 0x1337da, 0x1467b7}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 
0x3b88, &(0x7f0000000000)={0xc}) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x5460, 0x0) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f00000000c0), &(0x7f0000000100)=0x4) 00:20:16 executing program 0: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0xc0189436, 0x0) 00:20:16 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0xe8]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:16 executing program 2: syz_open_dev$dri(&(0x7f0000000040), 0xffffffffffffffff, 0x0) 00:20:16 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0x6, 0x0) 00:20:16 executing program 0: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) ioctl$USBDEVFS_DISCONNECT_CLAIM(r0, 0x8108551b, &(0x7f0000000040)={0x5, 0x2, "5e347533cfb5692f0f9c404ac12dbddf87a3a8a4f7f9091c97e2d5b6c1d9166ad2b2ebcd80a3a9c88737c61d2b63885be201edff5efe37da53a8ef4db7295f556540109c01b86001ed2e51f458d75475c1ca4f061b55345a3c2ef0a8651452773917fa14e19ced53fca503c50624ff4a1501199a8f12b9490d58cd35e787396380ce029e8d81d6732ffe5e1f85f2361a9d354de8b99c362e15fae56dab68fbc9f1933a5fca737da7849e01175afc7f0d7674feac3a840875bcb944da9171d1a68dd30c3b6d218b20f61d0046feab0f3700ab9fdfd95f25168611c1f9377020307457ee58a585923aa66f8f78309581df5dbc93ac35ff64d7b34560f83d05f47b"}) 00:20:16 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xa000000, &(0x7f0000001440)) 00:20:16 executing program 2: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x3, 0x0) 00:20:16 executing program 4: r0 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f00000000c0), 0x10400, 0x0) r1 = ioctl$LOOP_CTL_GET_FREE(r0, 0x4c82) ioctl$LOOP_CTL_ADD(r0, 0x4c80, r1) (async) ioctl$LOOP_CTL_ADD(r0, 0x4c80, 0xa) (async) r2 = accept4$phonet_pipe(r0, &(0x7f0000000100), &(0x7f0000000040)=0x10, 0x0) ioctl$sock_SIOCGIFINDEX_80211(r2, 0x8933, &(0x7f0000000080)={'wlan0\x00'}) (async) r3 = gettid() syz_open_procfs$namespace(r3, 0x0) 00:20:16 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:16 executing program 0: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(r0, 0x3ba0, &(0x7f0000000040)={0x48, 0x7, 0xffffffffffffffff, 0x0, 0x1, 0x0, 0x7, 
0x1337da, 0x1467b7}) (async, rerun: 32) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc}) (async, rerun: 32) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x5460, 0x0) (async, rerun: 32) getsockopt$PNPIPE_ENCAP(r1, 0x113, 0x1, &(0x7f00000000c0), &(0x7f0000000100)=0x4) (rerun: 32) 00:20:16 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0xc020660b, 0x0) 00:20:16 executing program 4: syz_open_dev$dri(&(0x7f0000000040), 0xffffffffffffffff, 0x0) 00:20:16 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0x7, 0x0) 00:20:16 executing program 1: syz_open_dev$radio(&(0x7f0000000000), 0x1, 0x2) r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x800, 0x0) ioctl$DRM_IOCTL_MODE_GETENCODER(r0, 0xc01464a6, &(0x7f0000000040)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f00000001c0)={&(0x7f0000000080)=[0x0, 0x0], &(0x7f00000000c0)=[{}], &(0x7f0000000140)=[0x0], &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x1, 0x1, 0x2}) r3 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r3, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0], 0x1, 0xf235955baba59ed3, 0x0, 0xffffffffffffffff}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r4, 0xc04064a0, &(0x7f0000001680)={&(0x7f0000000600)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000015c0)=[0x0, 0x0], 0x0, 0x0, 0x6, 0x2}) ioctl$VIDIOC_EXPBUF(r0, 0xc0405610, &(0x7f00000005c0)={0x1, 0x0, 0x0, 0x18180}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r4, 0xc01864c6, &(0x7f0000000580)={&(0x7f0000000540)=[r2, r2], 0x2}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f0000000280)={&(0x7f0000000240)=[0x0, 0x0, 0x0, 0x0], 0x4}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000340)={&(0x7f00000002c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000300)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x5, 0x0, 0xeeeeeeee}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000480)={&(0x7f0000000380)=[0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0], &(0x7f0000000400)=[0x0], &(0x7f0000000440)=[0x0, 0x0], 0x2, 0x2, 0x1, 0x2}) r8 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000000640), 0x400001, 0x0) ioctl$DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE_FD(r3, 0xc01064c2, &(0x7f0000000680)={0x0, 0x0, r8}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000500)={&(0x7f00000004c0)=[r1, r2, r5, r6, r7], 0x5}) 00:20:16 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x2]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:16 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xb000000, &(0x7f0000001440)) 00:20:16 executing program 2: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x4, 0x0) 00:20:16 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) 
ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000100)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x2, 0x8c57b, 0xce, &(0x7f0000000000)="5989f13e0334836d72bd244e7a0622b650052a0eabd68767a7b68fe9906d3995d9c0552ac018fabbf5249231dd295d36d8c7ee6db2af0128de48468e29432ff180a39235a9949d0f2cf96b9f22307b1a673e892117cbb78efa8a6f71329aa0c8c03af852a5e78e400025fd1739ec6cfcc1158ffe0a7c3768b09c69202fc72305c2e702fba100eb519dbc8c3044e995fd4d3376f86296673a8bebdd8aed1eefaae6037cc568ec5060990bb834836b8587d7ddbf47fc6e95bb6d9ab0fddb4a7406b89d90e48d76209f20c34ed9e46f", 0x10001}) 00:20:16 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x3]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:16 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) r0 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000001200)={0x0, 0x0, 0x0, r1}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x9, 0x6, 0x2, 0x9}) 00:20:16 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0x8, 0x0) 00:20:16 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x4]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:16 executing program 2: syz_open_dev$radio(&(0x7f0000000000), 0x1, 0x2) r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x800, 0x0) ioctl$DRM_IOCTL_MODE_GETENCODER(r0, 0xc01464a6, &(0x7f0000000040)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f00000001c0)={&(0x7f0000000080)=[0x0, 0x0], &(0x7f00000000c0)=[{}], &(0x7f0000000140)=[0x0], &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x1, 0x1, 0x2}) r3 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r3, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0], 0x1, 0xf235955baba59ed3, 0x0, 0xffffffffffffffff}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r4, 0xc04064a0, &(0x7f0000001680)={&(0x7f0000000600)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000015c0)=[0x0, 0x0], 0x0, 0x0, 0x6, 0x2}) 
ioctl$VIDIOC_EXPBUF(r0, 0xc0405610, &(0x7f00000005c0)={0x1, 0x0, 0x0, 0x18180}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r4, 0xc01864c6, &(0x7f0000000580)={&(0x7f0000000540)=[r2, r2], 0x2}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f0000000280)={&(0x7f0000000240)=[0x0, 0x0, 0x0, 0x0], 0x4}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000340)={&(0x7f00000002c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000300)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x5, 0x0, 0xeeeeeeee}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000480)={&(0x7f0000000380)=[0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0], &(0x7f0000000400)=[0x0], &(0x7f0000000440)=[0x0, 0x0], 0x2, 0x2, 0x1, 0x2}) r8 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000000640), 0x400001, 0x0) ioctl$DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE_FD(r3, 0xc01064c2, &(0x7f0000000680)={0x0, 0x0, r8}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000500)={&(0x7f00000004c0)=[r1, r2, r5, r6, r7], 0x5}) syz_open_dev$radio(&(0x7f0000000000), 0x1, 0x2) (async) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x800, 0x0) (async) ioctl$DRM_IOCTL_MODE_GETENCODER(r0, 0xc01464a6, &(0x7f0000000040)) (async) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f00000001c0)={&(0x7f0000000080)=[0x0, 0x0], &(0x7f00000000c0)=[{}], &(0x7f0000000140)=[0x0], &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0], 0x1, 0x1, 0x2}) (async) openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r3, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0], 0x1, 0xf235955baba59ed3}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r4, 0xc04064a0, &(0x7f0000001680)={&(0x7f0000000600)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000015c0)=[0x0, 0x0], 0x0, 0x0, 0x6, 0x2}) (async) ioctl$VIDIOC_EXPBUF(r0, 0xc0405610, &(0x7f00000005c0)={0x1, 0x0, 0x0, 0x18180}) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r4, 0xc01864c6, &(0x7f0000000580)={&(0x7f0000000540)=[r2, r2], 0x2}) (async) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f0000000280)={&(0x7f0000000240)=[0x0, 0x0, 0x0, 0x0], 0x4}) (async) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000340)={&(0x7f00000002c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000300)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x5, 0x0, 0xeeeeeeee}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000480)={&(0x7f0000000380)=[0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0], &(0x7f0000000400)=[0x0], &(0x7f0000000440)=[0x0, 0x0], 0x2, 0x2, 0x1, 0x2}) (async) openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000000640), 0x400001, 0x0) (async) ioctl$DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE_FD(r3, 0xc01064c2, &(0x7f0000000680)={0x0, 0x0, r8}) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000500)={&(0x7f00000004c0)=[r1, r2, r5, r6, r7], 0x5}) (async) 00:20:16 executing program 0: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000100)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x2, 0x8c57b, 0xce, &(0x7f0000000000)="5989f13e0334836d72bd244e7a0622b650052a0eabd68767a7b68fe9906d3995d9c0552ac018fabbf5249231dd295d36d8c7ee6db2af0128de48468e29432ff180a39235a9949d0f2cf96b9f22307b1a673e892117cbb78efa8a6f71329aa0c8c03af852a5e78e400025fd1739ec6cfcc1158ffe0a7c3768b09c69202fc72305c2e702fba100eb519dbc8c3044e995fd4d3376f86296673a8bebdd8aed1eefaae6037cc568ec5060990bb834836b8587d7ddbf47fc6e95bb6d9ab0fddb4a7406b89d90e48d76209f20c34ed9e46f", 0x10001}) 00:20:16 
executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xb6620c0, &(0x7f0000001440)) 00:20:16 executing program 1: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x5, 0x0) 00:20:16 executing program 4: r0 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) write$tcp_congestion(r0, &(0x7f0000000000)='vegas\x00', 0x6) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x1dd001, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, 0x0) 00:20:16 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x5]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:16 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) (async, rerun: 32) r0 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) (rerun: 32) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000001200)={0x0, 0x0, 0x0, r1}) (async, rerun: 64) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x9, 0x6, 0x2, 0x9}) (rerun: 64) 00:20:16 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0x9, 0x0) 00:20:16 executing program 4: socket$phonet_pipe(0x23, 0x5, 0x2) r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) r1 = seccomp$SECCOMP_SET_MODE_FILTER_LISTENER(0x1, 0xc, &(0x7f0000000080)={0x3, &(0x7f0000000040)=[{0x8, 0x7f, 0x20, 0x9}, {0x4, 0x6, 0x1, 0xffff}, {0x767e, 0x7, 0x8, 0x2}]}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0, 0x0, 0x0], 0x3, 0xf235955baba59ed3, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(0xffffffffffffffff, 0xc00464c9, &(0x7f0000001240)={r2}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f00000001c0)={r2}) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f00000000c0)={0x0}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f00000002c0)={&(0x7f0000000180)=[0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000240), &(0x7f0000000280), 0x3, 0x7}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r1, 0x40182103, &(0x7f0000000140)={r3, 0x0, r0, 0xff, 0x80000}) 00:20:16 executing program 3: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x6, 0x0) 00:20:16 executing program 0: r0 = gettid() syz_open_procfs$namespace(r0, &(0x7f0000000000)='ns/cgroup\x00') syz_open_procfs$namespace(r0, &(0x7f0000000080)='ns/time_for_children\x00') syz_open_procfs$namespace(r0, &(0x7f0000000040)='ns/pid\x00') 00:20:16 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 
&(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x6]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:16 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc000000, &(0x7f0000001440)) 00:20:16 executing program 3: r0 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) write$tcp_congestion(r0, &(0x7f0000000000)='vegas\x00', 0x6) (async) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x1dd001, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, 0x0) 00:20:16 executing program 2: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x7, 0x0) 00:20:16 executing program 1: syz_open_dev$dri(&(0x7f0000000040), 0xfffffffffffffffc, 0x4280) 00:20:16 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x6]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:16 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x7]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:16 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0xa, 0x0) 00:20:16 executing program 0: r0 = socket$inet6_udplite(0xa, 0x2, 0x88) getsockopt$PNPIPE_IFINDEX(0xffffffffffffffff, 0x113, 0x2, &(0x7f0000000080)=0x0, &(0x7f00000000c0)=0x4) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r0, 0x89f0, &(0x7f0000000180)={'syztnl1\x00', &(0x7f0000000100)={'ip6tnl0\x00', r1, 0x4, 0xe3, 0xf8, 0x6, 0x8, @dev={0xfe, 0x80, '\x00', 0x39}, @loopback, 0x1, 0x10, 0x10000, 0x7966}}) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r0, 0x89f0, &(0x7f0000000300)={'syztnl0\x00', &(0x7f0000000280)={'syztnl0\x00', r1, 0x4, 0x9, 0x7f, 0x80000000, 0x8, @private2, @private2, 0x20, 0x20, 0x10000, 0x3f}}) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(r0, 0x89f2, &(0x7f00000003c0)={'syztnl2\x00', &(0x7f0000000340)={'syztnl2\x00', r2, 0x4, 0x2, 0x4, 0x52bd, 0x12, @ipv4={'\x00', '\xff\xff', @empty}, @dev={0xfe, 0x80, '\x00', 0x29}, 0xeeeb24e358ecac27, 0x8, 0x3, 0x6}}) r3 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x109b01, 0x0) 
ioctl$IOMMU_VFIO_IOAS$GET(r3, 0x3b88, &(0x7f0000000000)={0xc}) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(r0, 0x89f2, &(0x7f00000004c0)={'syztnl2\x00', &(0x7f0000000440)={'syztnl1\x00', r1, 0x4, 0x87, 0x7, 0x4, 0x0, @initdev={0xfe, 0x88, '\x00', 0x1, 0x0}, @private1={0xfc, 0x1, '\x00', 0x1}, 0x40, 0x20, 0xffffffc1}}) r4 = socket$inet6_udplite(0xa, 0x2, 0x88) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r0, 0x89f0, &(0x7f0000000580)={'syztnl0\x00', &(0x7f0000000500)={'syztnl1\x00', r2, 0x2f, 0x1, 0x40, 0x5, 0x60, @ipv4={'\x00', '\xff\xff', @rand_addr=0x64010102}, @ipv4={'\x00', '\xff\xff', @initdev={0xac, 0x1e, 0x0, 0x0}}, 0x0, 0x8, 0x2, 0x401}}) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r4, 0x89f0, &(0x7f0000000640)={'syztnl2\x00', &(0x7f00000005c0)={'syztnl0\x00', r5, 0x4, 0x80, 0x9, 0x8, 0x20, @empty, @private0={0xfc, 0x0, '\x00', 0x1}, 0x10, 0x80, 0x3, 0x7}}) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(r0, 0x89f2, &(0x7f0000000240)={'ip6gre0\x00', &(0x7f00000001c0)={'syztnl2\x00', r1, 0x29, 0x9, 0x66, 0xf3b, 0x54, @dev={0xfe, 0x80, '\x00', 0x38}, @dev={0xfe, 0x80, '\x00', 0x1e}, 0x8, 0x7, 0xacd, 0x6}}) ioctl$IOMMU_VFIO_IOAS$GET(r3, 0x3b88, 0x0) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000400)) 00:20:17 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xd000000, &(0x7f0000001440)) 00:20:17 executing program 4: socket$phonet_pipe(0x23, 0x5, 0x2) (async) r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) (async) r1 = seccomp$SECCOMP_SET_MODE_FILTER_LISTENER(0x1, 0xc, &(0x7f0000000080)={0x3, &(0x7f0000000040)=[{0x8, 0x7f, 0x20, 0x9}, {0x4, 0x6, 0x1, 0xffff}, {0x767e, 0x7, 0x8, 0x2}]}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0, 0x0, 0x0], 0x3, 0xf235955baba59ed3, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(0xffffffffffffffff, 0xc00464c9, &(0x7f0000001240)={r2}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f00000001c0)={r2}) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f00000000c0)={0x0}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f00000002c0)={&(0x7f0000000180)=[0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000240), &(0x7f0000000280), 0x3, 0x7}) (async) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r1, 0x40182103, &(0x7f0000000140)={r3, 0x0, r0, 0xff, 0x80000}) 00:20:17 executing program 2: syz_open_dev$dri(&(0x7f0000000040), 0xfffffffffffffffc, 0x4280) syz_open_dev$dri(&(0x7f0000000040), 0xfffffffffffffffc, 0x4280) (async) 00:20:17 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x8]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:17 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0xb, 0x0) 00:20:17 executing program 3: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x8, 0x0) 00:20:17 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) 
setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x9]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:17 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xd942040, &(0x7f0000001440)) 00:20:17 executing program 2: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f00000000c0)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0], 0x4}) 00:20:17 executing program 3: r0 = gettid() syz_open_procfs$namespace(r0, &(0x7f0000000000)='ns/cgroup\x00') syz_open_procfs$namespace(r0, &(0x7f0000000080)='ns/time_for_children\x00') (async) syz_open_procfs$namespace(r0, &(0x7f0000000080)='ns/time_for_children\x00') syz_open_procfs$namespace(r0, &(0x7f0000000040)='ns/pid\x00') 00:20:17 executing program 4: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x9, 0x0) 00:20:17 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xe000000, &(0x7f0000001440)) 00:20:17 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0xa]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:17 executing program 0: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f00000000c0)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0], 0x4}) 00:20:17 executing program 4: r0 = socket$inet6_udplite(0xa, 0x2, 0x88) getsockopt$PNPIPE_IFINDEX(0xffffffffffffffff, 0x113, 0x2, &(0x7f0000000080)=0x0, &(0x7f00000000c0)=0x4) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r0, 0x89f0, &(0x7f0000000180)={'syztnl1\x00', &(0x7f0000000100)={'ip6tnl0\x00', r1, 0x4, 0xe3, 0xf8, 0x6, 0x8, @dev={0xfe, 0x80, '\x00', 0x39}, @loopback, 0x1, 0x10, 0x10000, 0x7966}}) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r0, 0x89f0, &(0x7f0000000300)={'syztnl0\x00', &(0x7f0000000280)={'syztnl0\x00', r1, 0x4, 0x9, 0x7f, 0x80000000, 0x8, @private2, @private2, 0x20, 0x20, 0x10000, 0x3f}}) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(r0, 0x89f2, &(0x7f00000003c0)={'syztnl2\x00', &(0x7f0000000340)={'syztnl2\x00', r2, 0x4, 0x2, 0x4, 0x52bd, 0x12, @ipv4={'\x00', '\xff\xff', @empty}, @dev={0xfe, 0x80, '\x00', 0x29}, 0xeeeb24e358ecac27, 0x8, 0x3, 0x6}}) r3 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x109b01, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r3, 0x3b88, &(0x7f0000000000)={0xc}) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(r0, 0x89f2, &(0x7f00000004c0)={'syztnl2\x00', &(0x7f0000000440)={'syztnl1\x00', 
r1, 0x4, 0x87, 0x7, 0x4, 0x0, @initdev={0xfe, 0x88, '\x00', 0x1, 0x0}, @private1={0xfc, 0x1, '\x00', 0x1}, 0x40, 0x20, 0xffffffc1}}) r4 = socket$inet6_udplite(0xa, 0x2, 0x88) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r0, 0x89f0, &(0x7f0000000580)={'syztnl0\x00', &(0x7f0000000500)={'syztnl1\x00', r2, 0x2f, 0x1, 0x40, 0x5, 0x60, @ipv4={'\x00', '\xff\xff', @rand_addr=0x64010102}, @ipv4={'\x00', '\xff\xff', @initdev={0xac, 0x1e, 0x0, 0x0}}, 0x0, 0x8, 0x2, 0x401}}) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r4, 0x89f0, &(0x7f0000000640)={'syztnl2\x00', &(0x7f00000005c0)={'syztnl0\x00', r5, 0x4, 0x80, 0x9, 0x8, 0x20, @empty, @private0={0xfc, 0x0, '\x00', 0x1}, 0x10, 0x80, 0x3, 0x7}}) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(r0, 0x89f2, &(0x7f0000000240)={'ip6gre0\x00', &(0x7f00000001c0)={'syztnl2\x00', r1, 0x29, 0x9, 0x66, 0xf3b, 0x54, @dev={0xfe, 0x80, '\x00', 0x38}, @dev={0xfe, 0x80, '\x00', 0x1e}, 0x8, 0x7, 0xacd, 0x6}}) ioctl$IOMMU_VFIO_IOAS$GET(r3, 0x3b88, 0x0) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000400)) socket$inet6_udplite(0xa, 0x2, 0x88) (async) getsockopt$PNPIPE_IFINDEX(0xffffffffffffffff, 0x113, 0x2, &(0x7f0000000080), &(0x7f00000000c0)=0x4) (async) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r0, 0x89f0, &(0x7f0000000180)={'syztnl1\x00', &(0x7f0000000100)={'ip6tnl0\x00', r1, 0x4, 0xe3, 0xf8, 0x6, 0x8, @dev={0xfe, 0x80, '\x00', 0x39}, @loopback, 0x1, 0x10, 0x10000, 0x7966}}) (async) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r0, 0x89f0, &(0x7f0000000300)={'syztnl0\x00', &(0x7f0000000280)={'syztnl0\x00', r1, 0x4, 0x9, 0x7f, 0x80000000, 0x8, @private2, @private2, 0x20, 0x20, 0x10000, 0x3f}}) (async) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(r0, 0x89f2, &(0x7f00000003c0)={'syztnl2\x00', &(0x7f0000000340)={'syztnl2\x00', r2, 0x4, 0x2, 0x4, 0x52bd, 0x12, @ipv4={'\x00', '\xff\xff', @empty}, @dev={0xfe, 0x80, '\x00', 0x29}, 0xeeeb24e358ecac27, 0x8, 0x3, 0x6}}) (async) openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x109b01, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r3, 0x3b88, &(0x7f0000000000)={0xc}) (async) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(r0, 0x89f2, &(0x7f00000004c0)={'syztnl2\x00', &(0x7f0000000440)={'syztnl1\x00', r1, 0x4, 0x87, 0x7, 0x4, 0x0, @initdev={0xfe, 0x88, '\x00', 0x1, 0x0}, @private1={0xfc, 0x1, '\x00', 0x1}, 0x40, 0x20, 0xffffffc1}}) (async) socket$inet6_udplite(0xa, 0x2, 0x88) (async) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r0, 0x89f0, &(0x7f0000000580)={'syztnl0\x00', &(0x7f0000000500)={'syztnl1\x00', r2, 0x2f, 0x1, 0x40, 0x5, 0x60, @ipv4={'\x00', '\xff\xff', @rand_addr=0x64010102}, @ipv4={'\x00', '\xff\xff', @initdev={0xac, 0x1e, 0x0, 0x0}}, 0x0, 0x8, 0x2, 0x401}}) (async) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r4, 0x89f0, &(0x7f0000000640)={'syztnl2\x00', &(0x7f00000005c0)={'syztnl0\x00', r5, 0x4, 0x80, 0x9, 0x8, 0x20, @empty, @private0={0xfc, 0x0, '\x00', 0x1}, 0x10, 0x80, 0x3, 0x7}}) (async) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(r0, 0x89f2, &(0x7f0000000240)={'ip6gre0\x00', &(0x7f00000001c0)={'syztnl2\x00', r1, 0x29, 0x9, 0x66, 0xf3b, 0x54, @dev={0xfe, 0x80, '\x00', 0x38}, @dev={0xfe, 0x80, '\x00', 0x1e}, 0x8, 0x7, 0xacd, 0x6}}) (async) ioctl$IOMMU_VFIO_IOAS$GET(r3, 0x3b88, 0x0) (async) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000400)) (async) 00:20:17 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0xc, 0x0) 00:20:17 executing program 1: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000140)={0x0, 0x1, r0, 0x6, 0x80000}) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, 
&(0x7f0000000080)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000100)={r1, 0x2, r0, 0x8001}) getsockname$inet(r0, &(0x7f0000000000)={0x2, 0x0, @multicast1}, &(0x7f0000000040)=0x10) getsockopt$EBT_SO_GET_INIT_INFO(r0, 0x0, 0x82, &(0x7f0000000180)={'nat\x00', 0x0, 0x0, 0x0, [0x8001, 0x7fff, 0xf09, 0x6, 0x100000000, 0xef]}, &(0x7f0000000200)=0x78) 00:20:17 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x10000000, &(0x7f0000001440)) 00:20:17 executing program 2: r0 = syz_open_dev$dri(&(0x7f0000000040), 0x3, 0x4082c0) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(r0, 0xc00864bf, &(0x7f0000000000)={0x0, 0x1}) 00:20:17 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0xb]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:17 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0xd, 0x0) 00:20:17 executing program 2: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000140)={0x0, 0x1, r0, 0x6, 0x80000}) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000080)) (async) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000080)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000100)={r1, 0x2, r0, 0x8001}) getsockname$inet(r0, &(0x7f0000000000)={0x2, 0x0, @multicast1}, &(0x7f0000000040)=0x10) (async) getsockname$inet(r0, &(0x7f0000000000)={0x2, 0x0, @multicast1}, &(0x7f0000000040)=0x10) getsockopt$EBT_SO_GET_INIT_INFO(r0, 0x0, 0x82, &(0x7f0000000180)={'nat\x00', 0x0, 0x0, 0x0, [0x8001, 0x7fff, 0xf09, 0x6, 0x100000000, 0xef]}, &(0x7f0000000200)=0x78) 00:20:17 executing program 1: r0 = syz_open_dev$dri(&(0x7f0000000040), 0x3, 0x4082c0) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(r0, 0xc00864bf, &(0x7f0000000000)={0x0, 0x1}) 00:20:17 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0xe, 0x0) 00:20:17 executing program 3: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0xa, 0x0) 00:20:17 executing program 0: syz_open_dev$media(&(0x7f0000000000), 0xd49, 0x420002) r0 = syz_open_dev$media(&(0x7f0000000080), 0x0, 0x0) ioctl$MEDIA_IOC_ENUM_LINKS(r0, 0xc0287c02, &(0x7f0000000180)={0x80000000, 0x0, &(0x7f0000000100)=[{{}, {0x80000000}}, {{0x80000000}}]}) ioctl$MEDIA_IOC_ENUM_LINKS(r0, 0xc0287c02, &(0x7f00000003c0)={r2, 0x0, 0x0}) r3 = syz_open_dev$media(&(0x7f0000000200), 0x5, 0x40) ioctl$MEDIA_IOC_ENUM_LINKS(r3, 0xc0287c02, &(0x7f00000001c0)={r2, &(0x7f0000000040), &(0x7f0000000080)}) r4 = gettid() ioctl$MEDIA_IOC_ENUM_LINKS(r0, 0xc0287c02, &(0x7f0000000300)={r1, &(0x7f0000000240), &(0x7f0000000280)}) syz_open_procfs$namespace(r4, 0x0) 00:20:17 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, 
@unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0xc]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:17 executing program 2: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000000)={0x0}) r2 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000080), 0x143281, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f00000000c0)={r1, 0x1, r2, 0x101, 0x80000}) 00:20:17 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x11000000, &(0x7f0000001440)) 00:20:17 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0x10, 0x0) 00:20:17 executing program 1: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0xb, 0x0) 00:20:17 executing program 2: syz_open_dev$media(&(0x7f0000000000), 0xd49, 0x420002) (async) syz_open_dev$media(&(0x7f0000000000), 0xd49, 0x420002) r0 = syz_open_dev$media(&(0x7f0000000080), 0x0, 0x0) ioctl$MEDIA_IOC_ENUM_LINKS(r0, 0xc0287c02, &(0x7f0000000180)={0x80000000, 0x0, &(0x7f0000000100)}) (async) ioctl$MEDIA_IOC_ENUM_LINKS(r0, 0xc0287c02, &(0x7f0000000180)={0x80000000, 0x0, &(0x7f0000000100)=[{{}, {0x80000000}}, {{0x80000000}}]}) ioctl$MEDIA_IOC_ENUM_LINKS(r0, 0xc0287c02, &(0x7f00000003c0)={r2, 0x0, 0x0}) r3 = syz_open_dev$media(&(0x7f0000000200), 0x5, 0x40) ioctl$MEDIA_IOC_ENUM_LINKS(r3, 0xc0287c02, &(0x7f00000001c0)={r2, &(0x7f0000000040), &(0x7f0000000080)}) (async) ioctl$MEDIA_IOC_ENUM_LINKS(r3, 0xc0287c02, &(0x7f00000001c0)={r2, &(0x7f0000000040), &(0x7f0000000080)}) r4 = gettid() ioctl$MEDIA_IOC_ENUM_LINKS(r0, 0xc0287c02, &(0x7f0000000300)={r1, &(0x7f0000000240), &(0x7f0000000280)}) syz_open_procfs$namespace(r4, 0x0) 00:20:17 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0x11, 0x0) 00:20:17 executing program 3: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) r1 = syz_open_dev$dri(&(0x7f00000002c0), 0x0, 0x80000) r2 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000280)={&(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0], &(0x7f0000000240)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x7, 0x1, 0x6}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r2, 0xc05064a7, &(0x7f00000006c0)={&(0x7f0000000500), 0x0, &(0x7f0000000640)=[0x0], &(0x7f0000000680), 0x0, 0x1, 0x0, 0x0, r6}) ioctl$DRM_IOCTL_MODE_SETPROPERTY(r2, 0xc01064ab, &(0x7f0000000740)={0x0, r7, r6}) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r1, 0xc04064aa, &(0x7f0000000140)={&(0x7f0000000080)=[0x0], &(0x7f00000000c0)=[{}, {}, {}], r7, 0x0, '\x00', 0x1, 0x3}) r8 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r8, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r8, 0xc05064a7, &(0x7f00000006c0)={&(0x7f0000000500), 0x0, &(0x7f0000000640)=[0x0], &(0x7f0000000680), 0x0, 0x1, 0x0, 0x0, r9}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r8, 0xc01064b5, &(0x7f0000000440)={&(0x7f0000000400)=[0x0], 0x1}) ioctl$DRM_IOCTL_MODE_SETPROPERTY(r8, 
0xc01064ab, &(0x7f0000000740)={0x0, r10, r9}) r11 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r11, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r11, 0xc05064a7, &(0x7f00000006c0)={&(0x7f0000000500), 0x0, &(0x7f0000000640)=[0x0], &(0x7f0000000680), 0x0, 0x1, 0x0, 0x0, r12}) ioctl$DRM_IOCTL_MODE_SETPROPERTY(r11, 0xc01064ab, &(0x7f0000000740)={0x0, r13, r12}) ioctl$DRM_IOCTL_MODE_ATOMIC(r2, 0xc03864bc, &(0x7f00000003c0)={0x402, 0x2, &(0x7f0000000040)=[r7, r4], &(0x7f0000000300)=[0xffff, 0x1, 0x8001, 0x77f1, 0x4], &(0x7f0000000340)=[r7, r7, r10, r13, r5, r3, r7, r7, r7], &(0x7f0000000380)=[0x3], 0x0, 0x451}) 00:20:17 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0xd]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:17 executing program 0: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(0xffffffffffffffff, 0xc05064a7, &(0x7f00000001c0)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[{}, {}, {}], &(0x7f0000000140)=[0x0, 0x0, 0x0], &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x3, 0x3, 0x5}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(0xffffffffffffffff, 0xc02064b9, &(0x7f00000002c0)={&(0x7f0000000240)=[0x0], &(0x7f0000000280)=[0x0], 0x1, 0x0, 0xe0e0e0e0}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000300)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x5, 0x4, 0x7}) ioctl$DRM_IOCTL_MODE_GET_LEASE(0xffffffffffffffff, 0xc01064c8, &(0x7f00000004c0)={0x1, 0x0, &(0x7f0000000480)=[0x0]}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(0xffffffffffffffff, 0xc01064b5, &(0x7f0000000540)={&(0x7f0000000500)=[0x0, 0x0, 0x0], 0x3}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f00000005c0)={&(0x7f0000000580)=[r1, 0x0, 0x0, r2, 0x0, r3, r4, r5], 0x8}) 00:20:17 executing program 2: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000000)={0x0}) r2 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000080), 0x143281, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f00000000c0)={r1, 0x1, r2, 0x101, 0x80000}) 00:20:17 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x12000000, &(0x7f0000001440)) 00:20:18 executing program 3: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0xc, 0x0) 00:20:18 executing program 1: r0 = gettid() syz_open_procfs$namespace(r0, 0x0) 00:20:18 executing program 2: gettid() ioctl$SECCOMP_IOCTL_NOTIF_RECV(0xffffffffffffffff, 0xc0502100, &(0x7f0000000080)={0x0, 0x0}) syz_open_procfs$namespace(r0, &(0x7f0000000040)='ns/uts\x00') 00:20:18 
executing program 0: syz_open_procfs$namespace(0x0, &(0x7f0000001700)='ns/time_for_children\x00') 00:20:18 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0xe]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:18 executing program 0: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(0xffffffffffffffff, 0xc05064a7, &(0x7f00000001c0)={&(0x7f0000000000)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000040)=[{}, {}, {}], &(0x7f0000000140)=[0x0, 0x0, 0x0], &(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x3, 0x3, 0x5}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(0xffffffffffffffff, 0xc02064b9, &(0x7f00000002c0)={&(0x7f0000000240)=[0x0], &(0x7f0000000280)=[0x0], 0x1, 0x0, 0xe0e0e0e0}) (async) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(0xffffffffffffffff, 0xc02064b9, &(0x7f00000002c0)={&(0x7f0000000240)=[0x0], &(0x7f0000000280)=[0x0], 0x1, 0x0, 0xe0e0e0e0}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000300)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x5, 0x4, 0x7}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000300)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x5, 0x4, 0x7}) ioctl$DRM_IOCTL_MODE_GET_LEASE(0xffffffffffffffff, 0xc01064c8, &(0x7f00000004c0)={0x1, 0x0, &(0x7f0000000480)=[0x0]}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(0xffffffffffffffff, 0xc01064b5, &(0x7f0000000540)={&(0x7f0000000500)=[0x0, 0x0, 0x0], 0x3}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f00000005c0)={&(0x7f0000000580)=[r1, 0x0, 0x0, r2, 0x0, r3, r4, r5], 0x8}) 00:20:18 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x18000000, &(0x7f0000001440)) 00:20:18 executing program 3: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) syz_open_dev$dri(&(0x7f00000002c0), 0x0, 0x80000) (async) r1 = syz_open_dev$dri(&(0x7f00000002c0), 0x0, 0x80000) r2 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000280)={&(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0], &(0x7f0000000240)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x7, 0x1, 0x6}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r2, 0xc05064a7, &(0x7f00000006c0)={&(0x7f0000000500), 0x0, 
&(0x7f0000000640)=[0x0], &(0x7f0000000680), 0x0, 0x1, 0x0, 0x0, r6}) ioctl$DRM_IOCTL_MODE_SETPROPERTY(r2, 0xc01064ab, &(0x7f0000000740)={0x0, r7, r6}) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r1, 0xc04064aa, &(0x7f0000000140)={&(0x7f0000000080)=[0x0], &(0x7f00000000c0)=[{}, {}, {}], r7, 0x0, '\x00', 0x1, 0x3}) (async) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r1, 0xc04064aa, &(0x7f0000000140)={&(0x7f0000000080)=[0x0], &(0x7f00000000c0)=[{}, {}, {}], r7, 0x0, '\x00', 0x1, 0x3}) syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) (async) r8 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r8, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r8, 0xc05064a7, &(0x7f00000006c0)={&(0x7f0000000500), 0x0, &(0x7f0000000640)=[0x0], &(0x7f0000000680), 0x0, 0x1, 0x0, 0x0, r9}) (async) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r8, 0xc05064a7, &(0x7f00000006c0)={&(0x7f0000000500), 0x0, &(0x7f0000000640)=[0x0], &(0x7f0000000680), 0x0, 0x1, 0x0, 0x0, r9}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r8, 0xc01064b5, &(0x7f0000000440)={&(0x7f0000000400)=[0x0], 0x1}) ioctl$DRM_IOCTL_MODE_SETPROPERTY(r8, 0xc01064ab, &(0x7f0000000740)={0x0, r10, r9}) r11 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r11, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r11, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r11, 0xc05064a7, &(0x7f00000006c0)={&(0x7f0000000500), 0x0, &(0x7f0000000640)=[0x0], &(0x7f0000000680), 0x0, 0x1, 0x0, 0x0, r12}) ioctl$DRM_IOCTL_MODE_SETPROPERTY(r11, 0xc01064ab, &(0x7f0000000740)={0x0, r13, r12}) ioctl$DRM_IOCTL_MODE_ATOMIC(r2, 0xc03864bc, &(0x7f00000003c0)={0x402, 0x2, &(0x7f0000000040)=[r7, r4], &(0x7f0000000300)=[0xffff, 0x1, 0x8001, 0x77f1, 0x4], &(0x7f0000000340)=[r7, r7, r10, r13, r5, r3, r7, r7, r7], &(0x7f0000000380)=[0x3], 0x0, 0x451}) 00:20:18 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0x12, 0x0) 00:20:18 executing program 4: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x595040, 0x0) 00:20:18 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x10]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:18 executing program 1: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0xd, 0x0) 00:20:18 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x80003, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x48e840, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000080)={0xc, 0x0}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(r0, 0x3ba0, &(0x7f00000000c0)={0x48, 0x7, 0xffffffffffffffff, 0x0, 0x1, 0x0, 0xa, 0x79d1a, 0x319cf7}) ioctl$IOMMU_TEST_OP_DESTROY_ACCESS_PAGES(r1, 0x3ba0, &(0x7f0000000140)={0x48, 0x6, r2, 0x0, r3}) 00:20:18 executing program 0: 
syz_open_dev$dri(&(0x7f0000000000), 0x18, 0x0) 00:20:18 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x11]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:18 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1b540000, &(0x7f0000001440)) 00:20:18 executing program 0: gettid() r0 = gettid() syz_open_procfs$namespace(r0, 0x0) syz_open_procfs$namespace(r0, 0x0) r1 = gettid() syz_open_procfs$namespace(r1, &(0x7f0000000000)='ns/time\x00') 00:20:18 executing program 3: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x595040, 0x0) 00:20:18 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x12]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:18 executing program 1: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000240), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000e80)={&(0x7f0000000c80)=[0x0, 0x0], &(0x7f0000000cc0)=[{}, {}, {}, {}], &(0x7f0000000e00)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000e40)=[0x0, 0x0], 0x4, 0xa, 0x2}) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r0, 0xc04064aa, &(0x7f0000000f00)={&(0x7f0000000bc0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000c40)=[{}], r2, 0x0, '\x00', 0x9, 0x1}) r3 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETENCODER(r3, 0xc01464a6, &(0x7f0000000340)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r3, 0xc01064c1, &(0x7f0000000780)) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, 0x0) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000180)={&(0x7f0000000100)=[0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], 0x1, 0x0, 0xbbbbbbbb}) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r1, 0xc04064aa, &(0x7f00000001c0)={&(0x7f0000000040), &(0x7f0000000440)=[{}, {}, {}, {}, {}], r5, 0x0, '\x00', 0x0, 0x5}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r1, 0xc02064b9, &(0x7f00000000c0)={&(0x7f0000000040)=[0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x3, r5}) r8 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000940), 0xa2001, 0x0) ioctl$DRM_IOCTL_MODE_GETPLANE(r1, 0xc02064b6, &(0x7f00000009c0)={0x0, r4, 0x0, 0x0, 0x0, 0xa, &(0x7f0000000980)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]}) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r8, 0xc01864b0, &(0x7f0000000a00)={r4, r9, 0x8, 
0xfff, 0x7fffffff}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000b80)={&(0x7f0000000a80)=[0x0], &(0x7f0000000ac0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000b00)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000b40)=[0x0, 0x0], 0x1, 0x5, 0x6, 0x2}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000000900)={&(0x7f00000008c0)=[r7, r5, r7, r6], 0x4, 0x800, 0x0, 0xffffffffffffffff}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(0xffffffffffffffff, 0xc01064b5, &(0x7f0000000280)={&(0x7f0000000200)=[0x0], 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r1, 0xc05064a7, &(0x7f0000000380)={&(0x7f00000002c0)=[0x0, 0x0], &(0x7f0000000540)=[{}, {}, {}, {}, {}, {}, {}], &(0x7f0000000300)=[0x0], &(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], 0x7, 0x1, 0x2}) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(r10, 0xc00864bf, &(0x7f0000000f40)={0x0, 0x1}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000880)={&(0x7f0000000800)=[0x0, 0x0], &(0x7f0000000840)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x2, 0x0, 0xb0b0b0b0}) syz_open_dev$dri(&(0x7f00000007c0), 0xe9, 0x18001) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(0xffffffffffffffff, 0xc00864bf, &(0x7f0000000a40)={0x0, 0x1}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000000740)={&(0x7f0000000400)=[r6, r7, r11, r12], 0x4, 0x80000}) 00:20:18 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x80003, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) (async, rerun: 64) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x48e840, 0x0) (async, rerun: 64) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000080)={0xc, 0x0}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(r0, 0x3ba0, &(0x7f00000000c0)={0x48, 0x7, 0xffffffffffffffff, 0x0, 0x1, 0x0, 0xa, 0x79d1a, 0x319cf7}) ioctl$IOMMU_TEST_OP_DESTROY_ACCESS_PAGES(r1, 0x3ba0, &(0x7f0000000140)={0x48, 0x6, r2, 0x0, r3}) 00:20:18 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0x25, 0x0) 00:20:18 executing program 0: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0xe, 0x0) 00:20:18 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x1f581c80, &(0x7f0000001440)) 00:20:18 executing program 2: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) ioctl$VIDIOC_EXPBUF(r0, 0xc0405610, &(0x7f0000000040)={0xa, 0x773, 0x30, 0x80000}) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) 00:20:18 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x18]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:18 executing program 0: gettid() (async) r0 = gettid() syz_open_procfs$namespace(r0, 0x0) (async) syz_open_procfs$namespace(r0, 0x0) r1 = gettid() syz_open_procfs$namespace(r1, &(0x7f0000000000)='ns/time\x00') 00:20:18 executing program 3: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000240), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, 
&(0x7f0000000e80)={&(0x7f0000000c80)=[0x0, 0x0], &(0x7f0000000cc0)=[{}, {}, {}, {}], &(0x7f0000000e00)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000e40)=[0x0, 0x0], 0x4, 0xa, 0x2}) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r0, 0xc04064aa, &(0x7f0000000f00)={&(0x7f0000000bc0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000c40)=[{}], r2, 0x0, '\x00', 0x9, 0x1}) r3 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETENCODER(r3, 0xc01464a6, &(0x7f0000000340)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r3, 0xc01064c1, &(0x7f0000000780)) (async) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r3, 0xc01064c1, &(0x7f0000000780)) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, 0x0) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000180)={&(0x7f0000000100)=[0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], 0x1, 0x0, 0xbbbbbbbb}) (async) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000180)={&(0x7f0000000100)=[0x0], &(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], 0x1, 0x0, 0xbbbbbbbb}) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r1, 0xc04064aa, &(0x7f00000001c0)={&(0x7f0000000040), &(0x7f0000000440)=[{}, {}, {}, {}, {}], r5, 0x0, '\x00', 0x0, 0x5}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r1, 0xc02064b9, &(0x7f00000000c0)={&(0x7f0000000040)=[0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x3, r5}) openat$drirender128(0xffffffffffffff9c, &(0x7f0000000940), 0xa2001, 0x0) (async) r8 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000940), 0xa2001, 0x0) ioctl$DRM_IOCTL_MODE_GETPLANE(r1, 0xc02064b6, &(0x7f00000009c0)={0x0, r4, 0x0, 0x0, 0x0, 0xa, &(0x7f0000000980)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]}) (async) ioctl$DRM_IOCTL_MODE_GETPLANE(r1, 0xc02064b6, &(0x7f00000009c0)={0x0, r4, 0x0, 0x0, 0x0, 0xa, &(0x7f0000000980)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0]}) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r8, 0xc01864b0, &(0x7f0000000a00)={r4, r9, 0x8, 0xfff, 0x7fffffff}) (async) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r8, 0xc01864b0, &(0x7f0000000a00)={r4, r9, 0x8, 0xfff, 0x7fffffff}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000b80)={&(0x7f0000000a80)=[0x0], &(0x7f0000000ac0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000b00)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000b40)=[0x0, 0x0], 0x1, 0x5, 0x6, 0x2}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000000900)={&(0x7f00000008c0)=[r7, r5, r7, r6], 0x4, 0x800, 0x0, 0xffffffffffffffff}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(0xffffffffffffffff, 0xc01064b5, &(0x7f0000000280)={&(0x7f0000000200)=[0x0], 0x1}) (async) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(0xffffffffffffffff, 0xc01064b5, &(0x7f0000000280)={&(0x7f0000000200)=[0x0], 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r1, 0xc05064a7, &(0x7f0000000380)={&(0x7f00000002c0)=[0x0, 0x0], &(0x7f0000000540)=[{}, {}, {}, {}, {}, {}, {}], &(0x7f0000000300)=[0x0], &(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], 0x7, 0x1, 0x2}) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(r10, 0xc00864bf, &(0x7f0000000f40)={0x0, 0x1}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000880)={&(0x7f0000000800)=[0x0, 0x0], &(0x7f0000000840)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x2, 0x0, 0xb0b0b0b0}) syz_open_dev$dri(&(0x7f00000007c0), 0xe9, 0x18001) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(0xffffffffffffffff, 0xc00864bf, &(0x7f0000000a40)={0x0, 0x1}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000000740)={&(0x7f0000000400)=[r6, r7, r11, r12], 0x4, 0x80000}) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, 
&(0x7f0000000740)={&(0x7f0000000400)=[r6, r7, r11, r12], 0x4, 0x80000}) 00:20:18 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x100040, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:18 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0x5c, 0x0) 00:20:18 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x20581c40, &(0x7f0000001440)) 00:20:18 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x100040, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:18 executing program 2: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) ioctl$VIDIOC_EXPBUF(r0, 0xc0405610, &(0x7f0000000040)={0xa, 0x773, 0x30, 0x80000}) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) 00:20:18 executing program 1: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f00000001c0)={&(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0xa}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0xa, 0x6, 0x6, 0x6}) 00:20:18 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0x300, 0x0) 00:20:18 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x21540000, &(0x7f0000001440)) 00:20:18 executing program 2: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x10, 0x0) 00:20:18 executing program 1: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000000)={0x0}) r2 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000080), 0x20000, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f00000000c0)={r1, 0x2, r2, 0x3}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000100)={r1, 0x1, r0, 0x1}) 00:20:18 executing program 4: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f00000001c0)={&(0x7f0000000180)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0xa}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0xa, 0x6, 0x6, 0x6}) 00:20:18 executing program 1: ioctl$IOMMU_VFIO_IOAS$GET(0xffffffffffffffff, 0x3b88, 0x0) r0 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) accept4$phonet_pipe(r0, 0x0, 0x0, 0x0) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(0xffffffffffffffff, 0x3ba0, &(0x7f0000000180)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff, 0x1}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(r0, 0x3ba0, &(0x7f0000000200)={0x48, 0x7, r1, 0x0, 0x0, 0x0, 0x5, 0x94037, 0x1349c2}) openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x8000, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(0xffffffffffffffff, 0x3b88, &(0x7f0000000040)={0xc}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES(0xffffffffffffffff, 0x3ba0, &(0x7f0000000080)={0x48, 0x7, 0xffffffffffffffff, 0x0, 0x0, 0x0, 0xfffffffffffffff8, 0x3e5}) 00:20:18 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0x500, 0x0) 00:20:18 executing program 2: r0 = 
gettid() syz_open_procfs$namespace(r0, &(0x7f0000000040)='ns/pid\x00') syz_open_procfs$namespace(r0, &(0x7f0000000000)='ns/net\x00') syz_open_procfs$namespace(r0, 0x0) 00:20:18 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x25000000, &(0x7f0000001440)) 00:20:18 executing program 3: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000000)={0x0}) r2 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000080), 0x20000, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f00000000c0)={r1, 0x2, r2, 0x3}) (async) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f00000000c0)={r1, 0x2, r2, 0x3}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000100)={r1, 0x1, r0, 0x1}) 00:20:18 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0x600, 0x0) 00:20:19 executing program 2: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x11, 0x0) 00:20:19 executing program 4: ioctl$IOMMU_VFIO_IOAS$GET(0xffffffffffffffff, 0x3b88, 0x0) r0 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) accept4$phonet_pipe(r0, 0x0, 0x0, 0x0) (async) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(0xffffffffffffffff, 0x3ba0, &(0x7f0000000180)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff, 0x1}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(r0, 0x3ba0, &(0x7f0000000200)={0x48, 0x7, r1, 0x0, 0x0, 0x0, 0x5, 0x94037, 0x1349c2}) (async) openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x8000, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(0xffffffffffffffff, 0x3b88, &(0x7f0000000040)={0xc}) (async) ioctl$IOMMU_TEST_OP_ACCESS_PAGES(0xffffffffffffffff, 0x3ba0, &(0x7f0000000080)={0x48, 0x7, 0xffffffffffffffff, 0x0, 0x0, 0x0, 0xfffffffffffffff8, 0x3e5}) 00:20:19 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x33890000, &(0x7f0000001440)) 00:20:19 executing program 1: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0, 0x0, 0x0], 0x3, 0xf235955baba59ed3, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(0xffffffffffffffff, 0xc00464c9, &(0x7f0000001240)={r1}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000040)={r1}) 00:20:19 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0x700, 0x0) 00:20:19 executing program 2: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0, 0x0, 0x0], 0x3, 0xf235955baba59ed3, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(0xffffffffffffffff, 0xc00464c9, &(0x7f0000001240)={r1}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000040)={r1}) 00:20:19 executing program 2: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0xa00, 0x0) 00:20:19 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) 00:20:19 executing program 1: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x12, 0x0) 00:20:19 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x369418c0, &(0x7f0000001440)) 00:20:19 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0x900, 0x0) 00:20:19 executing program 1: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc, 0x0}) 
ioctl$IOMMU_TEST_OP_DESTROY_ACCESS_PAGES(0xffffffffffffffff, 0x3ba0, &(0x7f0000000040)={0x48, 0x6, r1}) 00:20:19 executing program 3: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0xa00, 0x0) 00:20:19 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) (async) 00:20:19 executing program 3: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x18, 0x0) 00:20:19 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0xa00, 0x0) 00:20:19 executing program 2: r0 = gettid() syz_open_procfs$namespace(r0, &(0x7f0000000040)='ns/pid\x00') syz_open_procfs$namespace(r0, &(0x7f0000000000)='ns/net\x00') (async) syz_open_procfs$namespace(r0, &(0x7f0000000000)='ns/net\x00') syz_open_procfs$namespace(r0, 0x0) 00:20:19 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc, 0x0}) ioctl$IOMMU_TEST_OP_DESTROY_ACCESS_PAGES(0xffffffffffffffff, 0x3ba0, &(0x7f0000000040)={0x48, 0x6, r1}) 00:20:19 executing program 1: gettid() ioctl$SECCOMP_IOCTL_NOTIF_RECV(0xffffffffffffffff, 0xc0502100, &(0x7f0000000080)={0x0, 0x0}) syz_open_procfs$namespace(r0, &(0x7f0000000040)='ns/uts\x00') gettid() (async) ioctl$SECCOMP_IOCTL_NOTIF_RECV(0xffffffffffffffff, 0xc0502100, &(0x7f0000000080)) (async) syz_open_procfs$namespace(r0, &(0x7f0000000040)='ns/uts\x00') (async) 00:20:19 executing program 3: r0 = socket$inet(0x2, 0x80000, 0xffff) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000040)={'filter\x00', 0x7, 0x4, 0x408, 0x210, 0x210, 0x100, 0x320, 0x320, 0x320, 0x4, &(0x7f0000000000), {[{{@uncond, 0xc0, 0x100}, @unspec=@RATEEST={0x40, 'RATEEST\x00', 0x0, {'syz0\x00', 0x2, 0x1, {0x1}}}}, {{@arp={@broadcast, @multicast2, 0xff, 0xffffff00, 0xf, 0x0, {@empty, {[0xff, 0x0, 0xff, 0xff]}}, {@empty, {[0x0, 0xff]}}, 0x4, 0x7d4, 0x3f, 0x7fff, 0x4, 0x9, 'netpci0\x00', 'macvlan1\x00', {0xff}, {}, 0x0, 0x286}, 0xc0, 0x110}, @mangle={0x50, 'mangle\x00', 0x0, {@empty, @mac=@multicast, @broadcast, @dev={0xac, 0x14, 0x14, 0x33}, 0x2}}}, {{@uncond, 0xc0, 0x110}, @mangle={0x50, 'mangle\x00', 0x0, {@empty, @mac=@dev={'\xaa\xaa\xaa\xaa\xaa', 0x3f}, @loopback, @rand_addr=0x64010101, 0x4, 0xffffffff}}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x458) syz_open_procfs$namespace(0x0, &(0x7f0000001700)='ns/time_for_children\x00') [ 1219.468480][ T6190] Bluetooth: hci1: unexpected cc 0x0c03 length: 249 > 1 [ 1219.483990][ T6190] Bluetooth: hci1: unexpected cc 0x1003 length: 249 > 9 [ 1219.487656][ T6190] Bluetooth: hci1: unexpected cc 0x1001 length: 249 > 9 [ 1219.490897][ T6190] Bluetooth: hci1: unexpected cc 0x0c23 length: 249 > 4 00:20:19 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x400448c9, &(0x7f0000001440)) 00:20:19 executing program 4: r0 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) syz_genetlink_get_family_id$netlbl_unlabel(&(0x7f0000000140), r0) r1 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f00000000c0), 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r1, 0x40182103, 0x0) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x141282, 0x0) r2 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) r3 = syz_genetlink_get_family_id$nl802154(&(0x7f00000001c0), r0) ioctl$sock_SIOCGIFINDEX_802154(r0, 0x8933, &(0x7f0000000200)={'wpan4\x00', 0x0}) ioctl$sock_SIOCGIFINDEX_802154(r0, 0x8933, &(0x7f0000000240)={'wpan0\x00', 0x0}) ioctl$sock_SIOCGIFINDEX_802154(r0, 0x8933, 
&(0x7f0000000280)={'wpan0\x00', 0x0}) ioctl$sock_SIOCGIFINDEX_802154(r0, 0x8933, &(0x7f00000002c0)={'wpan3\x00', 0x0}) sendmsg$NL802154_CMD_GET_SEC_LEVEL(r2, &(0x7f00000003c0)={&(0x7f0000000180)={0x10, 0x0, 0x0, 0x8}, 0xc, &(0x7f0000000380)={&(0x7f0000000300)={0x6c, r3, 0x400, 0x70bd2c, 0x25dfdbff, {}, [@NL802154_ATTR_IFINDEX={0x8, 0x3, r4}, @NL802154_ATTR_WPAN_DEV={0xc, 0x6, 0x200000000}, @NL802154_ATTR_IFINDEX={0x8, 0x3, r5}, @NL802154_ATTR_WPAN_DEV={0xc, 0x6, 0x100000001}, @NL802154_ATTR_WPAN_DEV={0xc, 0x6, 0x100000001}, @NL802154_ATTR_IFINDEX={0x8}, @NL802154_ATTR_IFINDEX={0x8, 0x3, r6}, @NL802154_ATTR_WPAN_DEV={0xc, 0x6, 0x100000001}, @NL802154_ATTR_IFINDEX={0x8, 0x3, r7}]}, 0x6c}, 0x1, 0x0, 0x0, 0x20004000}, 0x20000000) r8 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) r9 = syz_genetlink_get_family_id$nfc(&(0x7f0000000440), r2) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000480)=0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f00000004c0)=0x0) sendmsg$NFC_CMD_DISABLE_SE(r2, &(0x7f0000000580)={&(0x7f0000000400)={0x10, 0x0, 0x0, 0x80000000}, 0xc, &(0x7f0000000540)={&(0x7f0000000500)={0x34, r9, 0x400, 0x70bd29, 0x25dfdbfe, {}, [@NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r10}, @NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r11}, @NFC_ATTR_SE_INDEX={0x8, 0x15, 0x2}, @NFC_ATTR_SE_INDEX={0x8}]}, 0x34}, 0x1, 0x0, 0x0, 0x20000000}, 0x4800) getsockopt$EBT_SO_GET_INIT_ENTRIES(r8, 0x0, 0x83, 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r1, 0xc0502100, &(0x7f0000000040)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r8, 0x40182103, &(0x7f0000000100)={r12, 0x0, r1, 0x7fffffff, 0x80000}) [ 1219.510274][ T6190] Bluetooth: hci1: unexpected cc 0x0c25 length: 249 > 3 [ 1219.555251][ T6190] Bluetooth: hci1: unexpected cc 0x0c38 length: 249 > 2 [ 1219.555343][ T1942] netdevsim netdevsim0 netdevsim3 (unregistering): unset [1, 0] type 2 family 0 port 6081 - 0 00:20:19 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) r1 = openat$zero(0xffffffffffffff9c, &(0x7f0000000000), 0x1c00, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, &(0x7f0000000040)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:19 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0xb00, 0x0) 00:20:19 executing program 1: r0 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)=0x0) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000080)=0x0) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f00000000c0)=0x0) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000100)=0x0) sendmsg$NFC_CMD_DISABLE_SE(r0, &(0x7f00000001c0)={&(0x7f0000000000)={0x10, 0x0, 0x0, 0x100}, 0xc, &(0x7f0000000180)={&(0x7f0000000200)=ANY=[@ANYBLOB='<\x00\x00\x00', @ANYRES16=0x0, @ANYBLOB="000128bd7000fedbdf2512000000080015000000000008000100", @ANYRES32=r1, @ANYBLOB="08000100", @ANYRES32=r2, @ANYBLOB="08000100", @ANYRES32=r3, @ANYBLOB="08000100", @ANYRES32=r4, @ANYBLOB="c1f37dcbf7241cf9f9e4099bd93628b0bdb47058f2dd462dbb127d09c86a3b0eb16e87048c63dd8c8080bf35b23aa7e235253fd1c8072cd2a329114ed703fe0c2da03e480abda2900baade72502310129aa63515f6bd"], 0x3c}, 0x1, 0x0, 0x0, 0x40014}, 0x8880) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) socket$inet6_udplite(0xa, 0x2, 0x88) 00:20:19 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x400448dd, &(0x7f0000001440)) 00:20:19 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0xc00, 0x0) [ 1219.668354][ T1942] 
netdevsim netdevsim0 netdevsim2 (unregistering): unset [1, 0] type 2 family 0 port 6081 - 0 00:20:19 executing program 1: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) r1 = openat$zero(0xffffffffffffff9c, &(0x7f0000000000), 0x1c00, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, &(0x7f0000000040)={0xc}) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:19 executing program 4: r0 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) (async) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)=0x0) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000080)=0x0) (async, rerun: 64) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f00000000c0)=0x0) (async, rerun: 64) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000100)=0x0) sendmsg$NFC_CMD_DISABLE_SE(r0, &(0x7f00000001c0)={&(0x7f0000000000)={0x10, 0x0, 0x0, 0x100}, 0xc, &(0x7f0000000180)={&(0x7f0000000200)=ANY=[@ANYBLOB='<\x00\x00\x00', @ANYRES16=0x0, @ANYBLOB="000128bd7000fedbdf2512000000080015000000000008000100", @ANYRES32=r1, @ANYBLOB="08000100", @ANYRES32=r2, @ANYBLOB="08000100", @ANYRES32=r3, @ANYBLOB="08000100", @ANYRES32=r4, @ANYBLOB="c1f37dcbf7241cf9f9e4099bd93628b0bdb47058f2dd462dbb127d09c86a3b0eb16e87048c63dd8c8080bf35b23aa7e235253fd1c8072cd2a329114ed703fe0c2da03e480abda2900baade72502310129aa63515f6bd"], 0x3c}, 0x1, 0x0, 0x0, 0x40014}, 0x8880) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) (async) socket$inet6_udplite(0xa, 0x2, 0x88) 00:20:19 executing program 3: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x25, 0x0) 00:20:19 executing program 2: r0 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) syz_genetlink_get_family_id$netlbl_unlabel(&(0x7f0000000140), r0) r1 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f00000000c0), 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r1, 0x40182103, 0x0) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x141282, 0x0) r2 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) r3 = syz_genetlink_get_family_id$nl802154(&(0x7f00000001c0), r0) ioctl$sock_SIOCGIFINDEX_802154(r0, 0x8933, &(0x7f0000000200)={'wpan4\x00', 0x0}) ioctl$sock_SIOCGIFINDEX_802154(r0, 0x8933, &(0x7f0000000240)={'wpan0\x00', 0x0}) ioctl$sock_SIOCGIFINDEX_802154(r0, 0x8933, &(0x7f0000000280)={'wpan0\x00', 0x0}) ioctl$sock_SIOCGIFINDEX_802154(r0, 0x8933, &(0x7f00000002c0)={'wpan3\x00', 0x0}) sendmsg$NL802154_CMD_GET_SEC_LEVEL(r2, &(0x7f00000003c0)={&(0x7f0000000180)={0x10, 0x0, 0x0, 0x8}, 0xc, &(0x7f0000000380)={&(0x7f0000000300)={0x6c, r3, 0x400, 0x70bd2c, 0x25dfdbff, {}, [@NL802154_ATTR_IFINDEX={0x8, 0x3, r4}, @NL802154_ATTR_WPAN_DEV={0xc, 0x6, 0x200000000}, @NL802154_ATTR_IFINDEX={0x8, 0x3, r5}, @NL802154_ATTR_WPAN_DEV={0xc, 0x6, 0x100000001}, @NL802154_ATTR_WPAN_DEV={0xc, 0x6, 0x100000001}, @NL802154_ATTR_IFINDEX={0x8}, @NL802154_ATTR_IFINDEX={0x8, 0x3, r6}, @NL802154_ATTR_WPAN_DEV={0xc, 0x6, 0x100000001}, @NL802154_ATTR_IFINDEX={0x8, 0x3, r7}]}, 0x6c}, 0x1, 0x0, 0x0, 0x20004000}, 0x20000000) r8 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) r9 = syz_genetlink_get_family_id$nfc(&(0x7f0000000440), r2) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000480)=0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f00000004c0)=0x0) sendmsg$NFC_CMD_DISABLE_SE(r2, &(0x7f0000000580)={&(0x7f0000000400)={0x10, 0x0, 0x0, 0x80000000}, 0xc, &(0x7f0000000540)={&(0x7f0000000500)={0x34, r9, 0x400, 0x70bd29, 0x25dfdbfe, {}, [@NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r10}, @NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r11}, @NFC_ATTR_SE_INDEX={0x8, 0x15, 
0x2}, @NFC_ATTR_SE_INDEX={0x8}]}, 0x34}, 0x1, 0x0, 0x0, 0x20000000}, 0x4800) getsockopt$EBT_SO_GET_INIT_ENTRIES(r8, 0x0, 0x83, 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r1, 0xc0502100, &(0x7f0000000040)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r8, 0x40182103, &(0x7f0000000100)={r12, 0x0, r1, 0x7fffffff, 0x80000}) [ 1219.803100][ T1942] netdevsim netdevsim0 netdevsim1 (unregistering): unset [1, 0] type 2 family 0 port 6081 - 0 00:20:19 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x400454ca, &(0x7f0000001440)) [ 1219.893431][ T1942] netdevsim netdevsim0 netdevsim0 (unregistering): unset [1, 0] type 2 family 0 port 6081 - 0 00:20:19 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x4c80, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) [ 1219.916352][ T3198] chnl_net:caif_netlink_parms(): no params data found 00:20:19 executing program 3: r0 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) syz_genetlink_get_family_id$netlbl_unlabel(&(0x7f0000000140), r0) r1 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f00000000c0), 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r1, 0x40182103, 0x0) (async) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x141282, 0x0) (async) r2 = syz_init_net_socket$nl_generic(0x10, 0x3, 0x10) (async) r3 = syz_genetlink_get_family_id$nl802154(&(0x7f00000001c0), r0) (async) ioctl$sock_SIOCGIFINDEX_802154(r0, 0x8933, &(0x7f0000000200)={'wpan4\x00', 0x0}) (async) ioctl$sock_SIOCGIFINDEX_802154(r0, 0x8933, &(0x7f0000000240)={'wpan0\x00', 0x0}) (async) ioctl$sock_SIOCGIFINDEX_802154(r0, 0x8933, &(0x7f0000000280)={'wpan0\x00', 0x0}) (async) ioctl$sock_SIOCGIFINDEX_802154(r0, 0x8933, &(0x7f00000002c0)={'wpan3\x00', 0x0}) sendmsg$NL802154_CMD_GET_SEC_LEVEL(r2, &(0x7f00000003c0)={&(0x7f0000000180)={0x10, 0x0, 0x0, 0x8}, 0xc, &(0x7f0000000380)={&(0x7f0000000300)={0x6c, r3, 0x400, 0x70bd2c, 0x25dfdbff, {}, [@NL802154_ATTR_IFINDEX={0x8, 0x3, r4}, @NL802154_ATTR_WPAN_DEV={0xc, 0x6, 0x200000000}, @NL802154_ATTR_IFINDEX={0x8, 0x3, r5}, @NL802154_ATTR_WPAN_DEV={0xc, 0x6, 0x100000001}, @NL802154_ATTR_WPAN_DEV={0xc, 0x6, 0x100000001}, @NL802154_ATTR_IFINDEX={0x8}, @NL802154_ATTR_IFINDEX={0x8, 0x3, r6}, @NL802154_ATTR_WPAN_DEV={0xc, 0x6, 0x100000001}, @NL802154_ATTR_IFINDEX={0x8, 0x3, r7}]}, 0x6c}, 0x1, 0x0, 0x0, 0x20004000}, 0x20000000) (async) r8 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) (async) r9 = syz_genetlink_get_family_id$nfc(&(0x7f0000000440), r2) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000480)=0x0) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f00000004c0)=0x0) sendmsg$NFC_CMD_DISABLE_SE(r2, &(0x7f0000000580)={&(0x7f0000000400)={0x10, 0x0, 0x0, 0x80000000}, 0xc, &(0x7f0000000540)={&(0x7f0000000500)={0x34, r9, 0x400, 0x70bd29, 0x25dfdbfe, {}, [@NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r10}, @NFC_ATTR_DEVICE_INDEX={0x8, 0x1, r11}, @NFC_ATTR_SE_INDEX={0x8, 0x15, 0x2}, @NFC_ATTR_SE_INDEX={0x8}]}, 0x34}, 0x1, 0x0, 0x0, 0x20000000}, 0x4800) (async) getsockopt$EBT_SO_GET_INIT_ENTRIES(r8, 0x0, 0x83, 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r1, 0xc0502100, &(0x7f0000000040)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r8, 0x40182103, &(0x7f0000000100)={r12, 0x0, r1, 0x7fffffff, 0x80000}) 00:20:19 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0xd00, 0x0) 00:20:19 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x40049409, &(0x7f0000001440)) [ 1220.011892][ T3198] bridge0: port 
1(bridge_slave_0) entered blocking state [ 1220.016302][ T3198] bridge0: port 1(bridge_slave_0) entered disabled state [ 1220.018392][ T3198] bridge_slave_0: entered allmulticast mode [ 1220.020474][ T3198] bridge_slave_0: entered promiscuous mode [ 1220.023447][ T3198] bridge0: port 2(bridge_slave_1) entered blocking state [ 1220.026632][ T3198] bridge0: port 2(bridge_slave_1) entered disabled state [ 1220.028561][ T3198] bridge_slave_1: entered allmulticast mode [ 1220.030606][ T3198] bridge_slave_1: entered promiscuous mode 00:20:20 executing program 2: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x4c80, 0x0) (async) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x4c80, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:20 executing program 1: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x140, 0x0) 00:20:20 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0xe00, 0x0) [ 1220.128070][ T3198] bond0: (slave bond_slave_0): Enslaving as an active interface with an up link 00:20:20 executing program 4: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x5c, 0x0) [ 1220.171215][ T3198] bond0: (slave bond_slave_1): Enslaving as an active interface with an up link 00:20:20 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x40086602, &(0x7f0000001440)) 00:20:20 executing program 2: r0 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) read$midi(r0, &(0x7f0000000000)=""/186, 0xba) r1 = socket$l2tp6(0xa, 0x2, 0x73) setsockopt$bt_rfcomm_RFCOMM_LM(r0, 0x12, 0x3, &(0x7f00000000c0)=0x8, 0x4) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000040)={0x0, 0x1, r1, 0xfffffffb, 0x80000}) syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) [ 1220.232430][ T3198] team0: Port device team_slave_0 added 00:20:20 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0x1100, 0x0) 00:20:20 executing program 1: r0 = gettid() syz_open_procfs$namespace(r0, &(0x7f0000000000)='ns/pid_for_children\x00') syz_open_procfs$namespace(r0, 0x0) [ 1220.272034][ T3198] team0: Port device team_slave_1 added 00:20:20 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:20 executing program 4: r0 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) read$midi(r0, &(0x7f0000000000)=""/186, 0xba) (async) r1 = socket$l2tp6(0xa, 0x2, 0x73) (async) setsockopt$bt_rfcomm_RFCOMM_LM(r0, 0x12, 0x3, &(0x7f00000000c0)=0x8, 0x4) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000040)={0x0, 0x1, r1, 0xfffffffb, 0x80000}) (async) syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) 00:20:20 executing program 2: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x140, 0x0) [ 1220.380731][ T3198] batman_adv: batadv0: Adding interface: batadv_slave_0 [ 1220.384277][ T3198] batman_adv: batadv0: The MTU of interface batadv_slave_0 is too small (1500) to handle the transport of batman-adv packets. Packets going over this interface will be fragmented on layer2 which could impact the performance. Setting the MTU to 1560 would solve the problem. 
00:20:20 executing program 3: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x300, 0x0) 00:20:20 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x40186366, &(0x7f0000001440)) [ 1220.402429][ T3198] batman_adv: batadv0: Not using interface batadv_slave_0 (retrying later): interface not active 00:20:20 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) [ 1220.414277][ T3198] batman_adv: batadv0: Adding interface: batadv_slave_1 [ 1220.426194][ T3198] batman_adv: batadv0: The MTU of interface batadv_slave_1 is too small (1500) to handle the transport of batman-adv packets. Packets going over this interface will be fragmented on layer2 which could impact the performance. Setting the MTU to 1560 would solve the problem. [ 1220.437391][ T3198] batman_adv: batadv0: Not using interface batadv_slave_1 (retrying later): interface not active 00:20:20 executing program 3: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0], 0x8, 0x7, 0x3, 0x2}) r3 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r3, 0xc05064a7, &(0x7f0000000480)={&(0x7f00000002c0)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000300)=[{}], &(0x7f0000000380)=[0x0], &(0x7f0000000440)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x1, 0x1, 0x4, 0x0, r2}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000500)={&(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000240)=[0x0], 0x4, r4, 0xb0b0b0b0}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r3, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r3, 0xc05064a7, &(0x7f00000003c0)={&(0x7f0000000280)=[0x0], &(0x7f0000000580)=[{}], &(0x7f0000000540)=[0x0, 0x0, 0x0], &(0x7f0000000380), 0x1, 0x3, 0x1, 0x0, r5}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f00000001c0)={&(0x7f0000000180)=[r1, r5], 0x2, 0x800}) 00:20:20 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0x1200, 0x0) 00:20:20 executing program 1: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x112000, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000100)={0xc}) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(r0, 0x3ba0, &(0x7f0000000000)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff, 0x1}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(r0, 0x3ba0, &(0x7f0000000080)={0x48, 0x7, r1, 0x0, 0x10000, 0x0, 0x2, 0x30bd9, 0x200cc4}) 00:20:20 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x401c5820, &(0x7f0000001440)) 00:20:20 executing program 4: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000000)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ID_VALID(r0, 0x40082102, &(0x7f00000000c0)=r1) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000080)={r1, 0x1, r0, 0x9}) r2 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, 
&(0x7f0000000180)={r1, 0x3, r2, 0x0, 0x80000}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r2, 0xc02064b9, &(0x7f0000001200)={0x0, 0x0, 0x0, r3}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000100)={r1, 0x1, r2, 0xfd90}) 00:20:20 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0x1800, 0x0) [ 1220.646630][ T3198] hsr_slave_0: entered promiscuous mode 00:20:20 executing program 2: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x500, 0x0) 00:20:20 executing program 3: r0 = gettid() syz_open_procfs$namespace(r0, &(0x7f0000000000)='ns/pid_for_children\x00') (async) syz_open_procfs$namespace(r0, 0x0) [ 1220.700004][ T3198] hsr_slave_1: entered promiscuous mode 00:20:20 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x4020940d, &(0x7f0000001440)) [ 1220.725691][ T3198] debugfs: Directory 'hsr0' with parent 'hsr' already present! [ 1220.729722][ T3198] Cannot create hsr debugfs directory 00:20:20 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x112000, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000100)={0xc}) (async) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(r0, 0x3ba0, &(0x7f0000000000)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff, 0x1}) ioctl$IOMMU_TEST_OP_ACCESS_PAGES$syz(r0, 0x3ba0, &(0x7f0000000080)={0x48, 0x7, r1, 0x0, 0x10000, 0x0, 0x2, 0x30bd9, 0x200cc4}) 00:20:20 executing program 3: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0], 0x8, 0x7, 0x3, 0x2}) r3 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r3, 0xc05064a7, &(0x7f0000000480)={&(0x7f00000002c0)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000300)=[{}], &(0x7f0000000380)=[0x0], &(0x7f0000000440)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x1, 0x1, 0x4, 0x0, r2}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000500)={&(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000240)=[0x0], 0x4, r4, 0xb0b0b0b0}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r3, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r3, 0xc05064a7, &(0x7f00000003c0)={&(0x7f0000000280)=[0x0], &(0x7f0000000580)=[{}], &(0x7f0000000540)=[0x0, 0x0, 0x0], &(0x7f0000000380), 0x1, 0x3, 0x1, 0x0, r5}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f00000001c0)={&(0x7f0000000180)=[r1, r5], 0x2, 0x800}) syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0], 0x8, 0x7, 0x3, 0x2}) (async) syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) (async) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r3, 0xc05064a7, &(0x7f0000000480)={&(0x7f00000002c0)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000300)=[{}], &(0x7f0000000380)=[0x0], &(0x7f0000000440)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x1, 0x1, 0x4, 0x0, r2}) (async) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000500)={&(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000240)=[0x0], 0x4, r4, 0xb0b0b0b0}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r3, 0xc04064a0, 
&(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) (async) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r3, 0xc05064a7, &(0x7f00000003c0)={&(0x7f0000000280)=[0x0], &(0x7f0000000580)=[{}], &(0x7f0000000540)=[0x0, 0x0, 0x0], &(0x7f0000000380), 0x1, 0x3, 0x1, 0x0, r5}) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f00000001c0)={&(0x7f0000000180)=[r1, r5], 0x2, 0x800}) (async) 00:20:20 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0x2000, 0x0) 00:20:20 executing program 3: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000000)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ID_VALID(r0, 0x40082102, &(0x7f00000000c0)=r1) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000080)={r1, 0x1, r0, 0x9}) r2 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000180)={r1, 0x3, r2, 0x0, 0x80000}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r2, 0xc02064b9, &(0x7f0000001200)={0x0, 0x0, 0x0, r3}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000100)={r1, 0x1, r2, 0xfd90}) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) (async) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000000)) (async) ioctl$SECCOMP_IOCTL_NOTIF_ID_VALID(r0, 0x40082102, &(0x7f00000000c0)=r1) (async) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000080)={r1, 0x1, r0, 0x9}) (async) syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r2, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) (async) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000180)={r1, 0x3, r2, 0x0, 0x80000}) (async) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r2, 0xc02064b9, &(0x7f0000001200)={0x0, 0x0, 0x0, r3}) (async) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000100)={r1, 0x1, r2, 0xfd90}) (async) 00:20:20 executing program 2: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(r0, 0xc00864bf, &(0x7f0000000040)={0x0, 0x1}) 00:20:20 executing program 1: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x600, 0x0) 00:20:20 executing program 4: r0 = gettid() syz_open_procfs$namespace(r0, 0x0) 00:20:20 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x50540000, &(0x7f0000001440)) 00:20:20 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0x2500, 0x0) 00:20:20 executing program 1: syz_open_procfs$namespace(0x0, &(0x7f0000001700)='ns/time_for_children\x00') r0 = gettid() syz_open_procfs$namespace(r0, &(0x7f0000000040)='ns/net\x00') syz_open_procfs$namespace(r0, &(0x7f0000000000)='ns/uts\x00') 00:20:20 executing program 2: ioctl$MEDIA_IOC_G_TOPOLOGY(0xffffffffffffffff, 0xc0487c04, &(0x7f00000003c0)={0x0, 0x3, 0x0, &(0x7f0000000000)=[{}, {}, {}], 0x1, 0x0, &(0x7f0000000140)=[{}], 0x9, 0x0, &(0x7f00000001c0)=[{}, {}, {}, {}, {}, {}, {}, {}, {}], 0x4, 0x0, &(0x7f0000000300)=[{}, {}, {}, {}]}) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) 00:20:20 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x28000, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000040)={0xc}) r1 = seccomp$SECCOMP_SET_MODE_FILTER_LISTENER(0x1, 0xc, 
&(0x7f00000000c0)={0x4, &(0x7f0000000080)=[{0x2, 0x9, 0x17, 0x19}, {0x1, 0x3, 0x20, 0xffffffff}, {0x7, 0x7, 0x4, 0x4}, {0x3, 0x1, 0x4, 0x1}]}) ioctl$SECCOMP_IOCTL_NOTIF_RECV(0xffffffffffffffff, 0xc0502100, &(0x7f0000000100)={0x0}) r3 = socket$inet(0x2, 0x4, 0x4) r4 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f00000001c0), 0x222082, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000280)={0x48, 0x8, r4, 0x0, 0x1, 0x2, 0x4f, &(0x7f0000000200)="ef106918a1adaf022b2756905309c291cd011ce97d49ac2d3d89eaa8e44290392fe96c68aafda38beb41c0129f1937d69795f01e92faa3fb15753dd38b82a84eb2ff4ae31337df3cda72d7b21abc58", 0x10000}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r1, 0x40182103, &(0x7f0000000180)={r2, 0x0, r3, 0x3ff, 0x80000}) ioctl$sock_inet_SIOCADDRT(r3, 0x890b, &(0x7f00000003c0)={0x0, {0x2, 0x4e22, @empty}, {0x2, 0x4e24, @initdev={0xac, 0x1e, 0x0, 0x0}}, {0x2, 0x4e21, @initdev={0xac, 0x1e, 0x1, 0x0}}, 0x200, 0x0, 0x0, 0x0, 0x1, &(0x7f0000000300)='veth0_vlan\x00', 0x0, 0x3, 0x9}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:21 executing program 4: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(r0, 0xc00864bf, &(0x7f0000000040)={0x0, 0x1}) syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) (async) ioctl$DRM_IOCTL_SYNCOBJ_CREATE(r0, 0xc00864bf, &(0x7f0000000040)={0x0, 0x1}) (async) 00:20:21 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0x3f00, 0x0) 00:20:21 executing program 1: ioctl$MEDIA_IOC_G_TOPOLOGY(0xffffffffffffffff, 0xc0487c04, &(0x7f00000003c0)={0x0, 0x3, 0x0, &(0x7f0000000000)=[{}, {}, {}], 0x1, 0x0, &(0x7f0000000140)=[{}], 0x9, 0x0, &(0x7f00000001c0)=[{}, {}, {}, {}, {}, {}, {}, {}, {}], 0x4, 0x0, &(0x7f0000000300)=[{}, {}, {}, {}]}) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$MEDIA_IOC_G_TOPOLOGY(0xffffffffffffffff, 0xc0487c04, &(0x7f00000003c0)={0x0, 0x3, 0x0, &(0x7f0000000000)=[{}, {}, {}], 0x1, 0x0, &(0x7f0000000140)=[{}], 0x9, 0x0, &(0x7f00000001c0)=[{}, {}, {}, {}, {}, {}, {}, {}, {}], 0x4, 0x0, &(0x7f0000000300)=[{}, {}, {}, {}]}) (async) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) (async) 00:20:21 executing program 3: r0 = syz_open_dev$dri(&(0x7f0000000000), 0xc8, 0x25a002) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0, 0x0, 0x0], 0x3, 0xf235955baba59ed3, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(0xffffffffffffffff, 0xc00464c9, &(0x7f0000001240)={r1}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000040)={r1}) 00:20:21 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x51540000, &(0x7f0000001440)) 00:20:21 executing program 4: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x700, 0x0) 00:20:21 executing program 1: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x28000, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000040)={0xc}) seccomp$SECCOMP_SET_MODE_FILTER_LISTENER(0x1, 0xc, &(0x7f00000000c0)={0x4, &(0x7f0000000080)=[{0x2, 0x9, 0x17, 0x19}, {0x1, 0x3, 0x20, 0xffffffff}, {0x7, 0x7, 0x4, 0x4}, {0x3, 0x1, 0x4, 0x1}]}) (async) r1 = seccomp$SECCOMP_SET_MODE_FILTER_LISTENER(0x1, 0xc, &(0x7f00000000c0)={0x4, &(0x7f0000000080)=[{0x2, 0x9, 0x17, 0x19}, {0x1, 0x3, 0x20, 0xffffffff}, {0x7, 0x7, 0x4, 0x4}, {0x3, 0x1, 0x4, 0x1}]}) ioctl$SECCOMP_IOCTL_NOTIF_RECV(0xffffffffffffffff, 0xc0502100, &(0x7f0000000100)={0x0}) r3 = socket$inet(0x2, 0x4, 0x4) r4 = 
openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f00000001c0), 0x222082, 0x0) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f0000000280)={0x48, 0x8, r4, 0x0, 0x1, 0x2, 0x4f, &(0x7f0000000200)="ef106918a1adaf022b2756905309c291cd011ce97d49ac2d3d89eaa8e44290392fe96c68aafda38beb41c0129f1937d69795f01e92faa3fb15753dd38b82a84eb2ff4ae31337df3cda72d7b21abc58", 0x10000}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r1, 0x40182103, &(0x7f0000000180)={r2, 0x0, r3, 0x3ff, 0x80000}) ioctl$sock_inet_SIOCADDRT(r3, 0x890b, &(0x7f00000003c0)={0x0, {0x2, 0x4e22, @empty}, {0x2, 0x4e24, @initdev={0xac, 0x1e, 0x0, 0x0}}, {0x2, 0x4e21, @initdev={0xac, 0x1e, 0x1, 0x0}}, 0x200, 0x0, 0x0, 0x0, 0x1, &(0x7f0000000300)='veth0_vlan\x00', 0x0, 0x3, 0x9}) (async) ioctl$sock_inet_SIOCADDRT(r3, 0x890b, &(0x7f00000003c0)={0x0, {0x2, 0x4e22, @empty}, {0x2, 0x4e24, @initdev={0xac, 0x1e, 0x0, 0x0}}, {0x2, 0x4e21, @initdev={0xac, 0x1e, 0x1, 0x0}}, 0x200, 0x0, 0x0, 0x0, 0x1, &(0x7f0000000300)='veth0_vlan\x00', 0x0, 0x3, 0x9}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:21 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0x4000, 0x0) 00:20:21 executing program 4: r0 = syz_open_dev$dri(&(0x7f0000000000), 0xc8, 0x25a002) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0, 0x0, 0x0], 0x3, 0xf235955baba59ed3, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(0xffffffffffffffff, 0xc00464c9, &(0x7f0000001240)={r1}) (async) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(0xffffffffffffffff, 0xc00464c9, &(0x7f0000001240)={r1}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000040)={r1}) 00:20:21 executing program 2: openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x900, 0x0) 00:20:21 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x52540000, &(0x7f0000001440)) 00:20:21 executing program 3: r0 = gettid() syz_open_procfs$namespace(r0, 0x0) 00:20:21 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0x4040, 0x0) 00:20:21 executing program 1: syz_open_procfs$namespace(0x0, &(0x7f0000001700)='ns/time_for_children\x00') r0 = gettid() syz_open_procfs$namespace(r0, &(0x7f0000000040)='ns/net\x00') (async) syz_open_procfs$namespace(r0, &(0x7f0000000000)='ns/uts\x00') 00:20:21 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:21 executing program 1: ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f00000fd880)={0x0, ""/256, 0x0}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f00000fda80)={0x0, 0x0, "da7048e1ed776d7c59a96c244735425e9920c8d2bbaec41a1cef0b8db1c850512e0b14f7cb58815c11ed4de3d749192fcab22ccc00e907a86620dabdeb1e35dd59823a50cb8db38bf00d58570ae08f832df34ae6638685d17aa6f1135d06b11d88a2382ee99bb226ea6d3abf8db57b7f4de829f7786e3944a04c0f27f63b09530d833f24a9558fe4cc8fb82387f5a4ae22ef24e7d4793b88f424d993ea7a394dcfa835cfb21fe38b4126f3d39590f1aec6b6cf213775b998579149945a92aac8e2ccc0c2bf612d0f7e1e892c0a563306a543fab92a1bb551ff060528b395cae288b242e6f8b69d6db2cbd7d2e1ceef3819a2103d6dc744695122f67802b124cd", 
"94463879b95679de8c4951fd9f987b92be4c991d388e6eb1c8e425a1ec338c3f774d728094bc8fa34814622ce61eb329d857f6fb9470be61514d9d0d0aa037f09c1d5dd27f916fab3b45236e7edd3ba15c7dbf208f7ec027efbffd1d08c6fba34e4ffe87c19dc8d32000396da6a959e3d4ccd8f0a56efb3039c46337aed5b0ed7a0eb3ba3991957f8df7855fa802c7ca14a7cef03803ca4af57c366d6f0b326794197f8447328ea32ce8b72da38134a1b3dcc911c1a4761ac343b60590036c6e10998a60f1eb8eee7a275a27a9031d88141413499a97aea1c7639408afaf99801bb081edc07eaffa3206c5ae260f4264f8e72dcca66b8c247084e7807340e59a7feb6f9de7dfe4fc223b5cc927a5cc7509038c075e1dffe9823eb68139f32f6550b2c479c668689e2979790b11971338fa330c130547ccd32a93ef739633913522734c6d308950bf90a081aabb9c486b1512dc6444f7aa89fdaa00c048ee261d8fd3ea3b7938eae56e24015cb7288ecd5ffd1418a4b0a95684e9508a9bb1bd51af2d4e350293bcdf0783de920261027ded64165c215a6c536bab2d63b92a3d1796a25b5ae0465c2255d50cba9b625618ca1b9e51feee23a1013299d5b545b9638a4f8feffb2fdf36a879bf3553792c64e67fd3cf003efbb5e4b630ae342d42423269a4f8152950d228bd08432a9ed4805495ec0ae2e55743e4ac40127dcf45121617cab90ecc5f60ba017695462a1007d2f4a507f579287856b5e625abf4668f356b319b867ca72bf4d871bd267deee98f6c681679cd6e9052be435b94c3d9f931b0d17ac5d11e9fb5f982278c3943cfa3eab7b6bf5280c9c241c54e301b74ddb14d8b96bbbed637d34ea89f59c481e46928f123cae8eab3c994c6ed8f4468061bd27da9d53354f26be61537a94eda96a8a629e2a5c0f40d2ca491e83239519e9a4b3696ed9106a7998e19ab40f5cc999455c2ebd8a16e4ce252f314809606a468c9a360efc7c143a13aa03f7e0e298e2f633f345df76019fa0bb06e0b0366a7729a3a0d6196be89ad6a15d9b1fbf857ea005adcfbe074454e5f2b774e5c066899d419333c7089842c9cdae493b170f8efdf298c5d85e722a470070698b39821a15d5496f38467a3a0b1dc9028c97c7ba348b4cb32351d6e8e88e1216cbffd09991159299428c7a097176c696bd627d2e15f7c77e652f58f249c79f66872d3c3ba12889e95a34ea3041f0becf4fd917de03e55eab4baa758758ad0e1928829d2972ea1b78c3287132c42a0d784a37d623a28e582f0a2077f855a5aac6b3a84fd09f413596cf68085aeae4f4c857ba566b076b07b8d981a0352994e629a67125b6d7f913c24c386dc70e3afab3c0f6466a27de24711981d452df2c43cf13b5fd8bb7e9842c819d6679cd89db3cf0d81540b89f65c67cdea2935dacdce03ee138ef9455a3a235703c91a5a1ac7daa575d083382518225f128ab7c5718f294c15fa487ecf25bb3dbb8f9c64a65a7fc5187fa1752c55d5ee394b59d15f10bd6b0caee44840caf48207462c96cb0cc04040529e851b3b0eec6cac4dd829e938b0e93eeaab188a4445b3b0909a32c020797aa5b75f197e18be1ca558eb44b7e05b8cc6e811d9db675be59c31e648f65e8071b93eff451b85fac3d9d16f9e349a5e9cae120ba2ab530e42bc780882e6e5af0927e7c0abd2abecbf42c773fd8068f95857b43c0d6b3963cfee52fa39a7271b4b7c835ea25b240e764f7037d03eefa41680381f26b77a658a892c43296e8bcbd1077ee5613d02bd27030c2dc1d0ed21ab1ba0421fe4ed6d7583b74f860ec4d088a7e4bc3033db91a3e51c0e6e9a5de049de9a269814e0e3a3b4c45705ef9e53da445d87b32d1c1b56b59b4dd2b25ee66c8b1adc828dec9377e7b66faa100cb5c2f7af3e253caa68c855a6570e1a69c6a9d926b8b957ac9870984827bcbc9ba9a9d81e133776102c68572197a1ca74b99b1c020cf60c027f62bc56347b6c805cd6300f9cdc0c1085ca74241208b4b65c8a0bca9829da444d9b5bf03dd9090bc2317e0d8f8389fd499d6659f2feea59b972c439705f4175fa8fc461d25ff17dfa7639950927c125e0bb447922dbcc3051ede2aa25162e8eab21a291110f55e16c0650a81ea1175a81e410e387ef94a90fc9dafc913ecd338242d6b25c7a474c92885c9250bdac7c0871c71d06b12175d62bf52676f854f6616e14c8cbb9aba9c246ec5c7c6ea9adfe29d0a68993f62afe2b96c3f6ca7c47f383f38e3900a2bfe03a77141038ea5bd6bcacc2833eadff1f44bb3c09794894ae31e3b0c01ee8bfdf972d124cd1dbb6be22275eb9b7e609b0a4f157ed39f3b3cbbb05bcc8f2208f8c393c912f62cbfb3ccabb8db64539d88d0abb785067d18f17ccf60bf01fc8f5f6ae2890e5d438c6b0fae834192611baf4011b29251ac7ceb1f92e018bb21f64d35f2a2453ee77ff929fe7afa5893f4d0ac15e4a6fe48e1c0b7ced2bc1193a4da20a2ce2
f0409c285284f2123045aab47e1b195b4bc9f5259c3e5d18666dd543b237b19ba864eabaf2c50949afdce340a5ea3c999fbbf6104bf6c4c156aed93cc152af54c9d11c626cb1df4c990aa950bf427f0b8233e80f38875613d0f71ec2fc06aa7b2433cc396ff78f205009edc46bfbdbba33f42558d05974fb4891027e87c728786d3d55da38944df11e5b7cc809bd3154aa61b1bf5a73e77f63a95639e4f502790a1b550f098157528f7a5107fc18531814bd4e5f7223802a37c751ee9a84b9fa9d8c123a576d734d190a888e9fd0d5e14a3d80d037484210f0d6c2bb64cc1f2f619c2af337091bc5a25627d8fff061361a60f0f7624ef51c2b120a9c233ad0fb277cf513e67fa1003376d096d7add4c40bca84e9c3ad7530eb39506ce6fd28a8dfd4c73fc302974f1651c78fb6777121fbe9dae44ab726dba9a159ddb6de8046c061e1d7cb8f70e79c23df2a2aa3fdd46bf760c8830f43f05befec15570feb9dad9a8e6af3486bdd0365a02e8bd7023cbaf1674f7b45a9ec9b9756804cc78d2b77e0702dca98777bad6907d8a97dee9f70c133b6484c5c19abca043e0ff519adc2540eafe82a19701259d354657ab8f84387624992fc0f905331393141742c6450fe19251c1fe60469e5c6713356c1c373e621c06b3a1d336c01c96725cce321b226f43ff35ea7fcc7d7df725f55230d68554ff5af3cc5e3b8c3e6b3123c819199e65b2dc11ddcac6164ea98a7dfb22162f80b0ce0dd8b911ffb6cc645003eba98449035e3ace18f56fcf4456e01264ec2a7a37838f104230135b59beee09bc40f73e96c833266cc64e9ee639e1529ef2a4e7eed54b9bb3101d67081345686ed150720b24d7621733505a46a30706b3667a2bf2aa85060d14bd8a953653c72aba0a4ac5a14848f59c87ed7fb6a89612de76ef0bc248e59cd2986a6c82b58d2b7708c249f682899f623541ac4818e009ce1ab6346363f3dea68311ccf5a0ff6dc2b10c0d240221d609a40f6a52d1d2e60597657e97e901a2e966b634d395cce2e00c9c05954b92b3c8ee8cad22556208d8263e54eb2dade0aa51d10c36761cf2e29be645976078f545d0acae2cb6554964ee746ce5142481473b353851d61d950ae7432627a532ba49ba8890b971ae289f81814c77e26bcd26519b858118d3188eaec9ada8154b7a679280eb48b5017b624006a4bba794e91df6f85129e6f10506c2062656df5b9eea8d16d482f081a92b79af0e460c2a60cad86728a6759184a0a2ee1832d409f772212685fcad65d6e947a88f6113b6038fa215b8ed9eacf218fc945bc4c9476f5f108b0d06c292d3daed155a8aeab18f4e60f6a71d46c2cad366f15578fa21c414f0960a7a65d4bb0ce935f3b17ea0eb8f7f0c1426cbb48a541c9a32182f60bc8989d440dca27fb3ef0348009a5b1d3d7069327dac73f04e3e29113ce78cc4af1647971753e1f5913b58c07f7568697e286973b0c56f8caa594670c62ed3747b1f93a2586dbde254d36cf359e9c978697a66510afd77a3c94afec580726252284aa458b17ae359b9add39e14a1747376f6a5fdc20f00de1b09d802d2c699725b0f6c9072df150917349772c4242f887d8e91498b2496ec3b3d648cfd4904251fd09a1cbe08c8ea7dbf1ada71a743163a1add8ae5640669b6de27de60c287807c422e985faa303dafa700db69a7507c677cc628ae427f2174245ce84a21ed01e4ef98a7a9be23fa2934ff699bea8f0736719f85cc5e41908eb557aee74c0a743d636cfeb2bcbcde830143186c7b0dbc3354e3f666f659cde6ccb14d3e8313ea7f62b2157a969c8dbe58e46d74a0db6b6b3228049cc8534fb8524765e481974e732237c3c68298559231bec518bf20865dd1d5d875ee2a90d2616f7288a58c8d3158070373506f808bff8943b0ae2042fc8a1ba09f2787d64c45941935852a79b95793442a88645a33098e34e7e6fb2ab9964494e248ecba13fe87bd9a6956d9c2281731f2fcd2cf7bbcceb104dfa4d15b716c1f9dd6ebd1792c78c88286e2bd8dccabb8c39a6571cbf6d4dc9a1867709e633a8fb236ab453461ad27224902ff66e483aabb9ed4284cfdd36b2e464c4dccbd56b14e2f4fa8aadc13179c6fe4867967a8258279c41889c66de70ffaf20e03df8925ec5169aef77f95aeed25b26d1b53eff10d29993da8d3d20bc82934a338d0696932b1595751254a39d66ec29f706bc8327f029d0e5c91983b17632282cdf22e4bfa923712a9aedbf0dd381fed0fe7b616a5285dee8fe5a9600ce3b2b9fdaa9a84aff740adbea500c63b4d72cf6934213304712d2911a516038e9dfe989212cb54099a151eb3897a5453bb5b663a5d389cf2a02b319d70e1d1a763e1c0542889123f7aa0ac1a7a824f337721dd1bffca1d8a7130958f4cca9d7a070a59b423ca7dc7d468808b93f9b793239ac2800b03ad5ed74fb289cbe4615b41c1707e082e0edf428b1f55a17ac0a4327b6c2e56da14ca0719cd67c5a6
94bedf861ea156f28d8988d37af6b1a4deb9978b7e502267d8c02c8f1945f84b0f464da219e9e17c8e437c5b0d61f5bc1c3db53b2992c188031756bb829c508ee614557609111ff19480b0a31bc3b4407f8711d75ff63ae0e108a770ca13fcc7386bdae45331f3ee1d1d0ef644ab4122e7f320de322cbfc0051b3c850ef69302856706b8ddf18148be0b67c0e57b590afa8525dc61bad9e681e691efa740724e93038431b9381025be8b2d410d930cf36a8723e123286b3bfc729efb3caba3ce32fd53ca2e5737a0694512577ceb1365f0229470b3a14767e4f63c3eab8a09c8e19e8e4e4458c49ff5f64b9153a101897edaa22b93a8854a210bb211f6dc14628fa04903c6ed8a7667eba5e882e"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f00000fea80)={0x0, 0x0, "1cb8e3ee7111829d19aa22a1f7b71c38ae31f7d48493d090e6d8c282d27a497fb3550ea628a02f7e6f982c622570f566eaf782b55f8ebff70c935740e54d92ce2319b507a792fdb2f7f50dd4272566849b99ba46e2e480fa711f4eb46a64caf288d5cfb0137d407128628f2c67493e62440dce8f439a63949c9b3c7b098762d870394225db7a8aa53e925d1242db499461c631bcb236141a0fe26dae6e91bd0e34c7cf04e39edeeddb2efdbf3c34ca9d1ac69f98060132362887ab4da723b7921f126f49e113d02d67cab717f0b4af225045e2c9cf9f4889191619296117d2ed482e320d2cc727fbc21a7799bac5e2c615e6395f6d2161fb6b7a800e92c4b67c", "830260b4127a5cad79a908f86337b73116271d3a824e537f1015f2dfb9f6f546acf09c9bb012c7c5b7aa836fa4975f5f51cdde6215eb10e4cdd3c24671c1544370adc178d466356cf0ce1cfcfde870044e7f5e77ffd975820f9d19bac5094b8d1e2004821e113a0386791a3ae94e4998ab04c6166d89a89304972072046b326e14e6a32c7593b5e448853b439d1c7831b4435079ed544ca2e7894e7d8860d54def6bf86a3458f1b33e9ed2db712162460211c7c0aae9db80b1878c2334ce431c0f0394e14a4ab36761c9b57b6c23cfbac2a7a82f7ecbd0e82035eb28b6473a761a0dea32c98c75feccfe72f102bc7fd7a54c3ef9aeaf82e1a1886e8f9c02f9150e9b0f07d475710fd1330a9268b3594a0aca0d9bc989f8ab113d93c09900c5d11afa0c63c6cb4db3e3ca2b2df0d18ee3f1cea9d62404b5d781d8caf031eabf5a64e2094c3bbd528f9f2b5cf0e47aa4e6fd897eff2da56297ebe0791419b6cc5f766001fc90aafc167ac6b95168f7001f12b2f19468f6d3bcd500c39aed3f5cefc04b33448605b60bb9758c865dfbede8cea2a59a3808eaf6c995180a66b7d17fd946410371e36121c90df0d318211b1b2fd3ebf2dc53a6a51da268c46346f113597f5238e80c7808e8fac6ef67419dd360aa66287de6862f70e82c8eeca665f4963d314d6d1028e22afc7e7f8a7d79f62851210442acf9236c5dbb8025c98c84ba442a2bd4dcad411ed5530caa62a1911c10cd452f5c20beb7f4ea9cb27a23a32b1d9b3b029340109cc5ecb511dfe38ea0b943957ddde47714fb9c19761f049b0fb8f6a5983e7c4e9246ba900618231fc7864305b7b63d602163e2d8eb2c5a27e85d3e91798953dc40f4d9dd4bd1e7e287939a834ae4290d2c6ca573485e3eac0fae4da1a730bc8b67597067324d9faebd9fae493097ea83df7bbbfe47b13e5d96901b7a016d75dbc5e69216f2d7bc20ff38be41dde76670bee2c7b6dc63af393c33560a0416562d1f38dc5213addf9c171f96bfc2919f7d9f096dca5b9ab0b1ba03d6dbbb33cbceb4ac2ccaaa64d81a38e6b61fa1bab2106c787906e05fca686220bb565d335f38c23f382ca187108e968204d514b05d5ec9abf987daee7fb195b1f21890e3c382886c7057bf8135e54ad44c1907d37b614688eea3fc4b55fd9c8aec0d21f2dbb07587ed9fd5f5f6aae84eb2a4541a201dd600d65693002d31bad78e187b28c60e942cf54a8c457664f2f6f9071a72356519d3b573de10a202dd93f6614f98970dd7589a9645d742307bb98471a55c083d63314aa252f2a70490c7ecea06832689ae3b6ca079ec973ce7148f4820630461bc3efaacccbf36328001354eebd9fee036783430aedd83c1ec2dc7be00d8d32c2de3d3c9916f9da0e84b641cc5789c59f32c90cac549827f545f9e0ee4a6299564cdb693817be4b26079e9280203db7c1d5c4b9228fb7bf93c4e1398a904d97dda2a049c73939a690de8baede18db84659909a61c978c1df5bfcd970ff1612336ab4fb5cfd2576cb227c21ba292fb6a55dc507782076427e4fbe8f2fb91b16480e31071369627ab1ee137a8781025c3ef22a0d81b935914a42881cbe759ce77d037fc8a7c4afe8a2828d31fc9b5b82d22a0689c652c19f43bc2029743e340fac09747110b54a7252e8715e1db022aa9b841dc251ba75116f8a
d4cca4a143e448ed901457ccd4147f9a26a608d1743f4555951b467eb4a1bdc51b85803215e72a4ddcec7d8eec8a927039ce6eefbd633a869e4b0517311e5a3f969495b3b0fb7bd4af2ad87b04a7190f6fda9f7c5a81ef7bf7e3a96a3d9ce3ea1ebbc70f69739a5fe51328f6d735b72c925817ae0031f9b5b67c1749532fb289a3a87d7e810b584fc65af6fb7a2a7be331d795a3cae5c7e0967ea5c0d53faeb1d99bc374dc40221ed7cbe7d8eadea49f0b14ec621853d888475ea99b79f11033f3e88e38de6026e9ebb2e76f38e627fb34a0aca1381e70ea73d68829f9943276cea738cf16bcc85a18a98612a7bf3bcea92e4b661ae58533f6134706abf45ba09b16b338d03f819c5b745551e8a7cc4619ed48cd67527874e2daa09c42390675932dc17b07d959180a08f95620b67f51bc7cd418ca23be82e7723154d11c054cf4aa74cbf7fbc66bd1409df7be35af212c819b19da3aaab6404fa4c412418e2d9eb7fa0381bb3d2d76a3af68a06f1de4206da6a00d49aa139415c105799921e5f45159f0812c5fdc88800d87e6f317fdeec06df51bdbfe84df91203f7ef97edb8140bc3ccd60499496290021a587a0289b019ec650585e7f6a69f4e253e41bf2b2a3bdf13b952e028ce3d13c35e3fef9eee0ca1cf025bab6844d7b6030184223d5a49fc3b46c8924e5d0d4ae9fc5b7ae9888d06885bc1a8d6190c9fc54250ef95dd8e7667d62f3b63b097da9805ce36e1a2297b5940f32aba9dc2d8ae332e95463d762e4b4e555f73d8a9f5084e5bb93a78fc91cfdd9035fa2d15abbf66373a07a30bb0828762c32f3f7277bc7230577ab125b96759280e42d22fa13092ec084edd022dffd23273aca981c78d892dbb4d3a7896e43c610d349bc21640a5e353869a09fbc88389687ff11a4ad8f0b095ab363401653bcb3fd278e973ea64133ae9283741ef197afaeafb70d25e69709edb521bd16bdbcdc407840933ebd8872bfe36211073aeab84392925fcb180b11ad4aca52bd22344be57fa42d256ce86da2a16b5f4c28497481117a48f58aacfb74fb6a476aa87b4c1fa38b91a9ee4e76904a5bfdada9a7dbf1594968b4dbf9c41a0d4a3a00ba55eb3a15e5a4f79a918fd657a95a7616bbf81e5d914b65710a2e1ef410799b0324d0c9cec6b499e5b75b8a3bdf5ee0d9c6331b0828e3027a32321ccec697d5eeadaa23463bfdd37022c4a18d5e4a2bd6f72edb588374e2bad574d972369d8a3175980a156b345fb1de086fe2942290479cde1bdc4313421b89b0f2b53eb96adf46ba7ef9306675ca9558fe05282e38121a28843ec486567455625f302e9d4f589b4ef0fd537c206675c5a7a5af253ed1fbea52e428a834aeda6d922c8a031f51e4fd4c595398d86439f006ffa4bad3ea8211d4658d2c141884e55b630f617b2727cd541331bac7ef08877db5b7a615caf10fe349a4ac36318e761292e8224d060727455b4e2cbc99f6ec77bdcb3c7696e858781ee721dada998b7db722c64b9bd1727d6bd9f17c7eb32e03c294d42d078c49b599a274535e9c66f3130b48e49cf35495acd2253828892c8ff5b62c1438e164e52cc3e47e2f5491dfc32514df3a08a2d7b7850d5715a42ad8b1d51a5170876dd695b5bbb8446d72affeabb120694afb80ec37fe0aa68e292b237047e1e00c79ae7521ac2d99d00c031060bfa4ca93fe55a638d78f8534d9502f530d9a727fd6198819cc54b2412a994f2f1e6804e2cf330f068cb393e313d633cc468b57e86cfc5efd0127e46d94a4693fba0e909d803862fdccd2554b3377dfd72d5212d7d57c0c2056a15ff8f63d9f72ce64d1a6328823dce26abea34d5a74a3f05c38be4bb9d8ba1944c7aaa437410f885eabe1cdd81c4b0c8ec2483c2de8176eef94105383f8b4cf7b297928296e0aff7ca953abed402a4b62a1d52cb04f10b9f18c77e5aede15860032eda339da2fb6e76a231c7acad1ba7f10bf2ae8891e9c2b9a3327006903eb7f6243c21fe92a701d7640513b9c494ea0709ff7ad8f5d8b8ae79d6604a9cf39793cd8abf8f1ba46358f8a2fbfe973f51d505e719a625a8c969bf96e57d91b1b9265fa0d4c86ccde3f100b89eb84bd787d33bbb10d2e72bf6504e3fdab762349a73c43d6c815ec3626d21266662dc572cbbd3c6b9acd9877cd28b821befde8544027b7ab63a57c2786ed60c8d2d08840bc1725a09cedc4b0a060bbf917b0cb1190442c367ae478c34141360ba40c70937e7eb8b0a3dbfce372e1bcb53365485f2412a8a39f3d49929616626566b4b27bbe84b1f7dde1151fc96eb936f491ed617df118e7bf98715ebff9b3bd40fc0bd21ff71e64610516a64dcfc39b3a61ecac95189b3de20a4f6938fc0db192d95a7f6933904785b3bd5877a9c0f5ebdd119cac3dd6930bb9d841287e3c3213d6ce0761026868a0ae1ecf7e17f4705c83cef22571e23d35bf763ac79e3ed126dce77230378fd2be21a11e4ec77de11fa04076bdeba56ab0b9d
f9229b0ff64e9e2752def36666ff9385ccd17324bc1db88d9f20c9cc93f7cfa8c3c4261270ec8f6d577d9740108dab6b3169905c47950ec9cddf2fe41beefcb0b2cb4822fee081d3c7fa57357026a462b041a29ad326aa20d9878ec125c1ae3960a731061f51731311bb470451f82776b9ca46eebdf72860b53dbb77bee964faf7af070a1415ad1735720213f725c6388ce977e456c95fb164f99ca9b56a6708e75512e9d9bb8856d7a6edd55c926f9793ffe494f90a946a8d6a9e35bb9b58fb33901cb55ad361629743e816a86e21c72c9cda063eb10285dce394ea6a243d065053aa2336e3a2d3d4e02e49ccac7535ca1d5fac63e3a2ac866d574faaebbe991be958e18b97d68efe0e65de353fa0d45697ac616040db91cee69c8e15f60b47e915da2610aab4f7df6436a0d63a1d78722d83bd07ee7e31268ddd3824d8ea794f226a48faffd0d651a10adb04fec628b70309880db652b67e4bbe94a1f17577c51d62d9014602f15d955b458ac2ab25b64332b311ff3008571426c1ba8c5ff9ef18a0536beaffbc6c86b005717db5a004fdaac6ab8f126e3039c4c58b957d526ca0fcd8ae979068e94cd527b3f31a35401560ee4d00101e5928c1781f406708c105560aeb6455bc2675b5ada399acaaf7da8ba024a3e2377a6655669291f547b32c2815da3e81bfb7dc658b307a75976b2d5fe3e6ecf27d9d3669e227ed516bc949ee2ac581895aa319965bb7f6279410968558404978e059b1dbcf0ee0cd24f6de8b6f82adadcc17df0d231820d7e83fda85112bd8d7941a36825a8422318114ec1160df65bce56429f6ca6406f2fc5e3baa485ae503e832205b6186008ccf4b42130fc69beda59571a225843587425948321ee6c9c3a2221751ff4d4a7b7ef9ec55a39fa16c8785fe64474f65f781538f74e7e86ab4cd8b9c7adbe47e3e787b5d20a9b72d62ea58877f34ec2ff82d940f4bd00f48c869e0681e0af9523c8aec711b703fc01321c9903d136046c710c307e413f203c50af9d1ed64870894a44a0ed49fc5c1bb17b97c8a4c9c58ca72212b007edf39ffe6b944761ae5a7835cfc8e1729af7f33ca148b1bc6748784f56bbddee72559fc16b5314f51f700f15b343d5bd02bb1ac003431d8743d30b7f9c1d0e4ba1095337abfacc977474527283bea0dcbf4debcbc7448704f06f"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f00000ffa80)={0x0}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f00000ffc80)={0x0, 0x0, "c9c31ffb81b8f7d7058236e52c99bb1931668c838396ab2ed9c7da95d921de283155336892039df46b3cdd86fe7c941066e86777fcaba96e58b84d52e6019e3d126071c11366197cd3af6d84d16024a160f4bbd98cd9487f2ce1f343d7f6fc87a45f051e7afc030a629636b400ee1b03181a9e75ee24cb89991f4c5702ed82ffb417e3205e209ab27232df3af166f92d9339e7c3a1e0fdcc3726d6c5429bf9d8cf2d5f33b06db8045fab1b8ca4b7b16228c7adee2eba3bbe935bfed80840de9267689a86b1622786b25f9e6bef89254eb57153809e1ccec92558bcdf4a8e74c59f2ee2a982c586531458c9f5e06edcca54ce9172197edb6b0a57b0f5d7f4921c", 
"70d816e6bdbb1e2573610fe6e458ba95507d8f377bd4aed348a2ba13fdbc998b34c6a6afbd7d9bdee78c34ef346bd0c2d9a9013363d726e163354eba18336c9e80d705779e2379e94abca4dcf8952c83ca046cdb3e1057d15cf06d9b46c2d613b0ad3490371df13c2d3dedbf74e68ba37a15da75194359e4b53bab3bf4bce0c75cbdd10b523e6b29e8cad91b27872c0b32e35da374f366efdf2f7ded983414199659c4a8ab9448dddac244ce1672d2ff96371ddec435dded5aade0076000469df55fb7047f5e80a5fa3423f25a292f0d644c3d0a4a9863668c5e8e8890e3fdd302da16529d9106cb57d63ecb150d51ea49f1fb15dc0ef2463b4a802f481d68c3b0e228948fafe015b2b0f2b59560f8c46943e39682dbc969c77db54792de245109c5d002bb3a758a92958e86db41449d7ca35c2669a4c723332ead572460226add3cd30221be48f93f664d8ed982b356a8934553ad0d9eb01e54df7ea5bbb4280583fc43e0c1473cfef6ac87ccad4f5a964759626afef787164e1c16f0ca19f4367db2cee05620e0e232d0bca2dd2357c4076640630b8cbd10e969172e88f9ed1f99fcda762461ebc140531f4e8cf0eda56fa143d58d888bb5d1bfcb737578f17a933b28415b6b91ed49c9e3207b9ebc8c1a2a409e9eabd8a8e8b723a7710d4f6b84e7f07c7c779d58f358bc1bcacac074e2d7e289c468cc0df2cf169a436d849bb7aa0074872a7eeaf3fbe2580fcb57f16b1de727eaadb81df38f841dfefb7f75cc6b6948e17a355851a203fc0332a5ef2693992714f754d5210a53250923ee204229b03ac26dbc04a20b78abf2f194845c183f0d786e3ed134f96a6916a9232ebe79030fcb3212122d3bc3d65f5005a694ba78188f948fdfb909a944653983203df617e006dddf134a3d5081c18cf39d37b016a50637831631ede8f00e7ca42fc97f0463dcf4d67ab4c561ac5a0cc4f28b743bebcef5bddb2fdb9ae124a0a09bd900d961c7cab5c9a29133b2fe19f2065408546b9b3f21cd9885bda00043fb795e257d7f09c893a38104dfaad37c4032c9275ce3e36c57cdd6e0f0f030e3601a388b8277d1a2438e52c1e5896c4304bbb9956e355274852bcbfa5ce39eadc0b6a8709d45a890a8ed41e5eaae4c0b5fccb3d6b501ab480694e7da33e6f9d6347f0678312b2ddf4e2a13ec206176f46ce01c5705be2c08a80784d1549614b5a277adf0a7eeed8d20efc3e98eca62db13671154fbded835fa8fc775cda2fbb15acb769675b28d1927256d3fe4ea8903b5267d6360bba729100ec1a261b0a32c69307039d2d4ee0077ea63bfc66ba3652437751c2c11028d122b59326b1c61831147eece55fd320bf4b387b7a747307e37e8f43c6ad8594ee004e6b881fdccf24f6fb922e8ab8c7b22539d55279ac3580703df9e89880cc7d87c0c993268a4c09bda2fa49e8045ce2d9721f52e7de11d89d0bb7c6010ff5d57144d1e355b015dbd97f7e1d7c9a1e462cef4ae1bd8815b3adae1e90e3cf1803990a4648c5bbc9c77e3b4a353327949a0f9eca3f6feb0ba4a1656e901f593faed41fa298663cefa90248a1ccb8de8abf39b03672964616ca70d949417929ba6ab147f47068cc6e251dfd37a9cdd590e8d4c32cb71d1d4a0a1805f0fbd89a9e2ee4830c064ee0d7824f7502d9e2228642eb219394e54a237b0c648b0474b432dd1abe3ce29dd14c793e2b1782bf74414a040661c09cb6d6629a50267e7e491c2eaadbd30fc0d80b49f75df0c1d70b538622b42363a438ac0908720cfc8efe30f925440854536166887d182e402667ce2418d426e458d7468b2274e6b8f6c7502aa5c72bbd7531e82b963d095de27dab527faa581d03474f363119f4c4d7209c3b1fa54b24e1c1c7cd498005e6918048424bb94840e60d8fccf410b1087369ed01677dadc9326e111d483a2d960d397d6883f03e0b3f3a7d3b649b9f9521d026dd7be96604bc8112223507e32dabf234393ecf969f6539825ae04b5897c48255fa2752666b16ba70920a7f8903beb69f80d02345021dbe37a59404e56eccb2bd16cb3846ecf630a44ae5eb6ef9e35d9641d949471988bb8272ea437ad57042c3fa468b533bf047cc731a03b771975feb8e758eca11b861136db1cd93fc7c49ac64623cda3e7dd6ff4f001c1f3a4b7ef5af9f61307f1e89b3b9ffdeeaef604f38f0e9bbfad8ba61186b0a1f6a468c2786ab632141a0308d1556aae9f3b6d3776bbb90db3277f9d26c4631d3388f7d9ee2c164e507b4c12742480b4368ae775e0dd013154c7edb32074d6ffe5aa90cc04cb67386c095975446ae2900002ea6031efebd65d383bb450646aad2b91c3f402643ccb6cb239cc234bfc04398c6ecb405cd4385086c5b2f0100963a708bbf591c6598e181c646225f2207690fa86cee55639082cbbb4f0d6739ee45d1ae4d4761e6d6f1aaed230cfb15243f3cd8239db84789cdc9bfe5f6d0907021fd48ab013ba8bd733270494a51d4790
aff901c357ed001719519029fe352d2b4a7762f029c879605c1bf8891520ba38b1083ee8e4c005ddc43de405af2e1e51761aa63b1994cf74b8c978f80d3a2bb208dd88fa8463177462102884bc70ca49babdac226fd9e6fb44ed6b095db74b84ecfc77dae8818056d495b1637afcf5029939f06458226a439f9a55611cd39b256a751706cfaf28daf4435c78f7fdceb325de2ac41558e991582e6cb0a7b4750af97804ccb7b35f907ebbce5549f8e3fd63a2a57350651769bc5ee69dafbf5115732f3f59c5a83d336bac434574fa76c250f1cd1ea56e953ce8dfae5bc38dbd58ddaf38e7424ddc04d06576e452f201d075cd8da57d6e707206d2ef97841c0ebc8b9b1cb010b52fb23de593ac9dc5c54021ebe46abc65ef35683b2d73a458767c8236428dc2957bfdce2c7e02cec37f70284f3d184c02549a4f5dc89d2dded355fece1ed37abefc84a877cdf4c03fda672a225387b26cbb04167e9639ba978da389828246e8b6fcb39f2ccadafd4eb80c7791b36734e277945ec57e4c9e1afae3b08f3ee3f366bd2af02b4166e7cc7591c9d3388f7085e13df4f74b2ecd61ab6dee9215a17fce669cb9d7cc93750bd1a992f151f2f9cd02af0ccbce4379c9ad1a874b17865051966c30a912071185be37bf03fb44252de0a93dc52187b0568a76c53a51fe0e9145f3752493ffbf2e3be8eaa1a33a4c7e1949b709329a2f4ff73a5de399ce0151a7c0f8cb85dfa78d7fb6c54790ae766a4448cea147b99fbd3c563aa8b9cac0f66d1bf01c2b03e6423cd397670ab9ab300596aca05c4eb32007723eab89199cc49f31364dbc36a734e1f229897e328d069530acad0b731207d08a602be6ab74b384d556a6a7956fe8c3d71beb72ab0f611659cf97b5e0fbb8866915b67998273abf3cd143cb2e7bbc6c70fd57ca5877c2db75f368234ea0137e7e6f52cebcf288fa444d1c7c089f98ab662b9bb5857a66caab8a24e1294ac18fbea60acb3a737924d645bd2e49406fcc465477af70a8bb943606850b2edaadab29acd60acaf34e08d4ed993d18857e0aaf54f0ff6757f7229d22e1d53ab335393560ee6d5aeeab0f9704865e7c93134d5bdd0256e93319ba631fde69349ff9a3f3e504a05656b39eaff412e7bcb3157f8d4708c75f39563255fa9f77ef91a49b1a82cf4e4d078735d5fc2384af40b5f07c40cb5007870c37a50425e54153b5f16c8d46398fe7bdb8d3153f5aa839d747f2754596a31063e8ec1083194bede564460e5e7835418b5fe5854e9a83a47a28afb953cec69a7cd40f9a9ae0f80d4e126a3e1c2a4516744784ecc15de38e7b7ba8a0c2d98028ae8d10c2e45637a02c3c34e8ade29e3b771a69d755ffdf7703ce06505971001deb0d829cc30acd633a537cd6925ba2d665e6846147ebf3fa503e74fde4657b50d48bfccc39c80b7931ecf9426a917f35be94dc75ee744a4684dfcc26b2e06727f05b1b63d2c9d846402ed4a6f588a73ac985eb0d31e67bba5e3062f7e048c684375ddb75159998123d8b45c89906d5f07e4aa77a8d9e371ecc8c85bb260a9acc20d0ecde2fbdb4fe38ef4d60d99952592ecd2f7189058ccfd26bb7b41ef5aff92c0587755df8f966762d829b2f1056997d7fd9083c87008e29b9fbad1917236546220b86e29c4da3abed665a426c6a8b39ae0b5c56d1882dee413cf4bd4fb84baca4f331f8356feb5f06dcd4876d4d34ad83e3b66a79c51f6304581f5b58f10052d0e9018d35fb97fe6de3bccaf2993add27dfc667f2858acfab50d4a1ead364b1dbdaed85968c0779ceadc7cba158044a18d819484167a424971794f8f00ce5ff691e005718be0bd54385d6f2ed54e04cc6a1d9911b0a471287824c55653163ce008b71195fc1286588dc3801aa3c148c0c4a75b22d1f171cede035c7005ecd131c6d894fc8516d5702ed742980f846dcb8d5723dac51c84c93cd4fb7c3cd6c1492ec0edabda58672c651bb54754b6397dcf9995d19ab0ee503758a628f4aadcbd37978802c19a42a443ccbfb661fab9a1b1f2e0b91dd0293282d32ab4af477c2dfb2314297243d1f5bcf822e4c37750480ba328ca06f4733517b216a2a7eb11d999367d0b567aa84aa12b69790997d5f3756379b11d975c0ae26368b908c160fecc179a760ffd49c3ae96dfee60147ac35c89b4b69240882226f70a04cca06915adbbd6f7fe194e05880c476962b84ff694074ff3790c2d9dc93df48f30a40eecf6365e35f7f459a8f5d15a7f73c964a5361d35d0d5a1166da86c19e7cfb109f89d8833dbda7500316304bc746361d62ef925cfd990ed64d24289f5590a8e8a1f71ba83c15f291f62039c2d85ffd96f1ffa01df4557703b6d4bd31041af983c073510f33f52ce99a2acc0f913880c8f8734d0716b84ae531b0ff62d696323f7c87fa4eecbf9015f2e18806ccfda32344f1513b36cf66f7a9d60fee77de46637b8cf5172a8507e575b5edab32c1c3fb30f6a2d0efd044916480572bc
ebbf6df9f6bbd39d3e8b48463270969a2f51bc6c41a49184a2f8ded15d82ea6bcb632aeec9f67c0d1bb54840e5ef6f6deee81778975104537359f45983978db1d68967d55e86acea9facf8f2fbd985dd1e22de868ce95ca5769e94ff0eb63fe007b5ced722441e669e914d4df14a15262f680452978a221c5a461cce81baa283bd32b368780f0ea75bca5e319d3fa1b8da971f4af42d1adc4465d0519796d8f2809c2cc967d70ba7db57ddd8c920dbedc412f868f0be8043d520c49d77a60930ff00593e566fd9ccf6825a2e71e2428e3cedc0ef852f937fa3d87dca9c03040ca9969b7b630002bf9db55d8bdfa926463c341de3f74255230d693ece9eb404f3a7c6e8fcfaa6f22fc08756ed5a0"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000100c80)={0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(0xffffffffffffffff, 0xd000943d, &(0x7f0000100e80)={0x100000000, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x9, "7a57db910fce71"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000101e80)={0x0, ""/256, 0x0}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000102080)={0x0, 0x0, "03a6672b8e2bcb72a67614f13f14ce369ac99db8243d792729d2a550f4c0729b6935ee28f908ac80b6bfd41c1fd5da4ba3ccc82f6cf7b329245d569142366eb11f85b4b5ba27574e80f50fbc358899cbcf6900701e2e5414c684cc46df0dadd52684d087e5879baa735ed64ba2ac024f5bd62a3af2c3ec2cf40395a262832aa4b211707b47c3ea4c69e61da79a24b2ddb0147aa4635bc4856db16d5487786abef21422dfcaa0bc93a18e03740f6cad74a09d1b233f9b6306e867681bf46548d2f49fb5eff96af6361c8ddd966bfbbc07515f3c7dfeb8da82c5da8951419a3e44f96e258f027691149dfd9834327e0ab153c25a82d2095a8c343e79b62472e5be", 
"dcf7f91087d08c1880567dc5ad588c061dcd36b1d65163a806d6e09c0347c281b515518084f751020726a4dc16b7c6441a790da4fcbd6d150c1ef1d2fc4c67eda1d640bcd410590211c3b17ca0bfd73bdb8a3c2e0dc18f1b94f590d3b1bebf4e17c0d863294994f8c9dc5c674f4ef39a0dde042a579b56fe52b8d189ee62c425f3ad72213caa6e3ed93f5b443e55fe4fa2f5bd551076fd3129fda3610033ef88f993c92c13c15a2680865473854a572d35edadd645655186bd03bd1c88fc50c726269438fa949a22b5eadb2a235cefa91efd6d50ce730405265dee9c2abc5c9df1ef031a91d0c06e91f00a35db55581e0607e51efad4b249db3dd179b72b2b3575070bca9b3f6ab03ae0ff36415de799e9baa6dd906177872d809210f901fb53aa30056b5ac253aab1319e63083020d7653c271217117cbb2949b292e4931783131897f402c3895096407d0326a45a7f58e4f5bbed105a0b078fd91a80462603ca0756afd484d6815aa8c3c76e7a5c77735539ff8cfbefe4ed894ef8d1853d1b93168a84e1166f721d28921c2cbb2af492eb619c469a00008110ae0c64c309b40aca4c0cd50685ba67b2976f306a335fa628e8def2978da0068d342a9a091adf13bc88574aa33b9d2869060c785f268bb9e912c02dfe293c84391480dd34cd6bf0e2e8babab590ac9f4b10cc4d3f3600014cb38988423d15f5095bc828f07737e9a7c378bf89d7275948eb451a54474f74116a12b302a886a5a028d12ae112d69354b0dcfa1e3f0df2283d9962593d7851620b8730354ba5ec4b6fe25d7656afdadccd3bb015aab1eda85364e5a6ee99551c732328413d407508263b5f47cc367651f47c8b7929da62855487720c4c43d100b82c00382cdc573a45c70e31fc118dc4d6bef7defef60a926665b097c95794a1ba87414e2b7b900ffa8f6994a9cccad2a835ac9f9c12c3328c4ecc3cffe43e46f959cd652119aff786482eefb84b1f7e8b11288690567d62f484798ea428bd17e7dff3a0a18e56faf2f56f3d851a382c0b4e6a9b3d027ce07e53d5db847060ae37f830b7e7baa943ac10ff3cc73094cb78c64ae368be9c4a7de0127f1eac24977e08de110f942b166573416b1b6d75084a65a64d255d7181cb4b2e1a8a31d400a57f50ff23666be1c56856bef62e68557d9c58f1a3dd95f6814b37b5955db6c0fac80f625f85eebf220a9011db64a061e70e6393f27820743f679947e1b6f01d2f85de0201565bf5964e886d5ff774d66b13ee53e3de0df69d4df82e09624eb4d6eed074992740bd244af9c71adc702c95bd4bcb9fc2bc0e122034c31302b0bf8d954607aba4a19d4c0c95f23fc9eb95f6cddea3e67bc272bff4c38a672ba1597e180d176b7de0cc5bc609b6a9373e489bc8f66313bfb6e9161fb19a31170599c851e96b9bbe132bede216de8f8e3aadba0dfeb5714e6c40b3dd7a339c6d587ca96b5ed7846f4bd17518ea2d5e3169aceec8ac1aae38fd3123ddd368f6d55788c97764cf8aaa968601a880fb857a14bcba5f34e0abc3b17bad12a78cbff77b1bde05df960799e6f1bfa93bafec7f047a39ba51e789b8cdd9043c5fd7614f750b739a110b1a971848466948ac1d740aa1fa2aa31d10f37f988ef8a7c5e6b1597f5e9c483af9cf297dd8fac44edde43f1cf087047a2587d093220af0e76070f9e450befe880b15cbe34a726801fa8a53a53c180f6fe20d29f624bf19e13611c9a4c1f6060a803eb5073acd25c09bf5f0771875f70b0e760f46310b5dd20e3ed9f9cfdd1722dfa4cfb0787fb35a67fce9adcf74f1857d5ff19f79b91e15c3ced5330788731fcf2046f289e2cd337ebcece48ed2f2ee2e01d24055eea68e551c49518ce2694da80432f027931b58822e0162d73412a1d2078a14d9d783046ce6294cb5eb161b9a55075e8bac4f6fbd53c3f4e228e94854a3e464d1d129595c1016828317cc617cb4cf324d46a77b45d658c07019e836698b19e4947583d3ddf175b0204a29c6847bd44400e5b79ef44e06d71cb68dec188fc6abc810af5aac94a43ae400e963001347f29eb21ccc4b00d1e0f3b352f0002b8a0a4ae3b949b7eba7832a1b1b10c53d02bf44ac2c78d8aae735b8b04ae9e88e34535532fb2f22eaaac766e8e990334e3434912bd278af7e6a6fb61aa9b7e49cb2e1d1ad43c75c18bb7643157c9c291efc3dac0eab5edfaf63cc01e52b599b929305e74c6d345e1f156afc9cea5e6c0ecd2d065ea550c69464e51982457c0d5fed490fd82d4c6f88c8c9b4c09fae60e53278f7efb51393a694e359b4a187690795bb3b06dfebdd15d06c6040006db79f75597dbcfe888bc5bd044b2f317a9a79e6f21020ae9e509c5f2137d4e973e000111a8c1571094df1ae2f0f542ad58a35f68157a87276967069e4ddfc4c9586fe80855403c09bd9b10fddf436e8731cceed4b2efe897d1bc10b457b04bf6c881973d073938fd7bec18ca1adce392c0444d691b8eb72132a322a
46d100eb43b9120b0d4d1980d91361069a4e6a620e1803e2563e811333e8a863ba2db5a3b4ccaf7ffd543aae3d51b0bdf88f53845eaa531521fdc5f0e26fa85325c208efac985099cdd20d0c092a4f972ddff3a7f8d47c54ff396330b38e9f8d33c331a52420dc1066c9c33295495835973ea5461d0d5185ac31160579e169da9b6cccd371a8b0fa1f9d08395ea7c9f95e3e522f2e6e89dfca48234a54a4d4164ae6b47dda7fcc6a0d852b7831cc51ff36554a76bb7a6b15eaf11019ddf0773e631b266fc52a9194822ea6172fca59644c0cc644a0386a6352567e5d1cacef3393f4c1976427eb013a91d679cf5473817c5e633b28f8a1fd0845f7666dc83372adebd794bda7d888d6766f8a6761300947fd5fed03d27fbdd31efd732d6db79cea9b37cd63225ff581085b4e5f33b989566e80a5a4df56bac9e10e846528f833e814ab644a9cad1259cd2a3ec98c57de0bd60df1d83acca4eeffce3c2ff3d416fd16c984db4708b0e52a0cdbe032f80e444e6ce889c0d3bee1140c8a8fad05cf79adba416f3c0a8f31badd74e4284535db8098266f033bb5e16b0b3d987ebd33f6fa28b09b3c3bb7e528ed7410e808b930cd2c308b396d1679e6b378e8b9951ab3f6f5f8d7a1ade30cdaa723ad84845b9717bcaed9ccc04dd0880e92f4449ac6c58c0c33fc40adc3e00e5506e8e26af37fc2263b31dd0ede115b4074c96cb395b95148d61b2909bc8a1db600b45f7663810216ac1ff39e5ae07ee8e620d7cb14e64349f47302530412d7de994cde7c94912083abea3110ab247f92d869203efe045a2eed2ebd53272d5c71e54227b59ae1b64dd7688e723eca45403d68fbb79eaf0cc250abea45d9f10880a74f91f96f646779d79ddf295fef673e3fc38895b33cf9f120b4393fd1131cc34adb3f6bfd5fcc3119ecc2512b8cf6abbfe48d7c6af8e46f44bec09c0988ee781468e715eb13eaa2218517913bf04959a002af763b7c617250516849d4df3c1948c141ef2c59583d13062826591422e02105175e1fc764eb2d5789c7dfead9c16c39be3ef90417bb3ba1861f780b0458a9aabcf9b4da334000bbfe471c643dd471411b6be9eb04c70451873ef4ae9b3794f02b9dda7ea871dcd7b0872089869617b0793c1721ac87ea2dde66a55b9135fb463b120f8952b8e7d3d6334a3dfaf8612cdda2bb5d679656e5cfbb5b6c24c2deae76996386b94b0b7f411748b53acfa79f9805fe9b0ffac5232ba819f8ee7cad14072de40a9c35dcfafdd4aa674ab59d737d11ec21bc9f95ff0af309b221a02873d47354120b307bb599d5aaf9b735744c3c6b884005d5487e8b648b09562ba7083cfd1201dc6f7e688ec87765fc2539d2fabeba8ae849fc1125262be9ebc6a8f6c61168d9139aad4f10b85714f297c54a4a4c3a9f5f8b5a30923ea84bf61759eaf6f134f32dc85777dd3d05a0f8786992b37629810664cc392914c10218b6bc2bd6361ee0e0fd9fa5fe6b4acf56bee518748635f23b564ba898fe58a897188b5c4b3f01a42edf842adba147b3b1971dddb02f91d2af8ee9c5e1b544a57d6fc2cddf735c0ca53db280ef8c46782325773e24d91b659c666afe1d9cf7c79e20ef7e6badeb41e5036ec87730bbf1d687c88c919bbe048d471f0e0aa8f92afa231fbeb17c164750a9cd53523221662306e250286e550a5fe3990a27d7a0f039f49e7003db1b9502568132c465559489fd7a3801ab627a7ab30c5d92edb8dfa43ce9c268aefab47b09fe3551f63a47ac35a2e762c471340b2c8670a13c454ebfb17c52c7b5451e0fa3491bcd9cc697c30a7f4100e1041e964f4d6e09182e4ef9997b1908eff38bf457b63483fef3fe1c6178fe1a038f761df303c7cb58806daf77c3fc76934e99de69add57078e8316a0452499a4444186069810a4a53ad4c0c5b42dad76509c513af49e70dfd19827aefb0de39c95e60d3a98e39bee33ae1b0b715a2dbff58c4d7783d1b863d257e5eb6d169ba83cf98c9cf314c2f1c8b4c26d2cd049ea12510bc751bf64155e5f3225c4ffd616cd97ddda68f9d0fae9e462902b458ea8bf40e566b62da21708ddabfe093377501762a90c87aacdbdd5d441037128165e5b5ffaf57ae0a323d55039541af66d454d0b654110809295ba95be7d4a8765ab6d67c6587e120454b35fc8434fdb9be4ff64af45722d47f59a51dc26a12bb3e13e3e08d15b8db37807e2b74336b75d7c379dcd5f069db5bafe7dfc5c4583f0f82dda311ca431480236c720574c20e49b032ad243999401318c3df48c4ef493bcf3872e0b7e15e4750f4a5c476edf55100e71029e0e973fccf003239371a871457a93e5e724f8da409b2919f95cf892911f1931a2221d619dee6bd97ea121da9bb27fd784dfa0ced76cf3de444af7e343074d49ea63afcab86fb6363e52a2f70bd797fbb8f267e295526c26ba041d93206f09174c1697f7619382bfbae5ffee461425b2efe91f1dc96cf34ccd9c4a31d23ea434
7cf65b9b32b674f26b4ff61e2ea9b206021645b3b4bc1cd62ca25d1e4afc0365ffe9109f8e53b6ef122a40094dae5f9a1758ae639d5ed2f1a3da21467a18d5d949a724ac8ef6022675df5d424d277f4e5d326bad195f6931467acba0381b7e8d0973ae450c5722c2381c789c38ecf42516e5467dddab086e9c705ef2b40eecfc12e26ea77759fee3e3f8ff5e5448cdb68df146c71efcd1d93b135348aa4cec732623a1ae5247662096391461d77abcffcc8f7914e020f51c1ea33ad61edd48867f95745609bc2dc98271f87e43df58784e88e48fac509755213c363f8ce1e4c294ac9e5626436d67980421dd98acac80ab2ddefbc9c9796e1cc181a81b3fa0c2b5f4ac00d68e3f6069add634542"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000103080)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000103280)={0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000103480)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(0xffffffffffffffff, 0xd000943d, &(0x7f0000103680)={0x3, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}], 0x2, "950382f6725b97"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000104680)={0x0, 0x0, "27e3677a225f0d61f4a2ecbdae356974b1365b4403d18be9ce490501493555c5f94283b32da8040469251e63aedcb74bdfea98cb136aa3496cedecdfdba273ec4c618416ef39ce95d65346d713deddce0b75d68ca0aced5458e53a73bbf68ae26cb887f5a1508dc13227a9eb29bc48c011efec8392df48cb999b14ded44b5377d5622cf5102b7371f1d0c6f9e6e3791284bd158bb28afcb859292f6f1d7e0191cf4f581fcb58c547bef487dcdcc64af315e300c27c394447b7af7d1b26c21856594ba5376e0f07731cebfa0fa721e00c21151e35f4c9917b7ece95c395fa1d614d9d9730c28a953f76a89a251d4ab46561187780f82c149077ade73b95cfdc9b", 
"66f76911dea4f206956abf0ab37aa7e6822074786b9e2b9739549dae6bac11028af657d3e6c81e865830db9edbfe2906097df7415bf913f31a38c29f544d179ba5cdcfa9a4279631ce30dc3c5162fd157ddd02a8c3f1d38217b1d26c5611fcc2955e4c68f4b422731539cdc9848d70f51dc52c4612238669300dca0b88bf80e908692a5e3569c5e0d81f1ffdd63c1fe0bf629334d829b9f431e7aa3823691326fded979f280fa5e48a20a469e23927dab2c56bed776054937e6ba6b6e1c4b8274685309345818392472091e4647326775b6a62a783d49f3d5a9db1ee46b9c7c61ff23f3db09eef4aebb5afa5d5eac643fb97fe04b201202ca6f7eb054fb1f35e97d1c431f6d2d1e53c8f82b76b058a0ff578bbc3bf227fb7a46b5ac88b2a5c76bfc281da60790f007e70a8d9dfe1313e96f7a70105e586d8c00e42ea1247a0dc6dac6e1b35a73354c2264d2044ae9600f62620a64dfcca66cd7528356334c97837ddeb649d66de90304c1b471d8d6d40c567e935363eba23bd4a8f92c3c7112423c5ca31b03742ea170cc516ea73574cfc730d73cb2355a5bc7a920c4a38c036cff931290a2b7090070fc57944983f9fc6a9daebf70e4a18a080ab87d59bc11c7b02532c1f9f0a3fbd3bb466c8d0b03bb926cefc2de0956167927409b9c79f1b8787ddd399539b98243e17c37203ce5278a9eef797d9ce5e6869b740742543e60e535d46c3a788fd086d41df1b33b1b5dbd0150ce9a64fec0bfea7d910b7aac27d057f2edfaa1c8f5fc64c67e9a049926a8853fc592e0fd296798d09277d796101208ce678ae80acf50bc2a84b2bbe21d599c0fe92e38f6ee99103e9fff1effcf85f38fe4b400260d18d7fdbaf0b1b4c98ac555702c9fe18e5cefb87f42e90f8361d3bcffd55c4125499e96b91ecce1ba11a07262bc42a5c22d9ac1964020efd1acc1cb19b615424d698b7e46e474d3e13d2a1f41addae8842da7b1b8b405ad5fe1312cfa019815f526d4a2f144690dee2a8496650a5083a4d88463d2649bab8a4e3850465f3c06d8087904b46d9a75e7e1bbbb5fd5348cc80ce763a2c8529cb3f104916073cbe587f0cd39a7a9cfff5ac7a40d4a7173a0768ae2423a3d35af83b0c645a8d6c38960d9de30c4c362f062ce0ad02037fe8772d10b59221b0676884aac6039b4978fa88451e63f3eec074f9ff243fe799da27a3e11012593e8a55f299cbb7d164c8ccfe6c496fef8a01e575b322f821cbf7a7cc2b550b060c82da0e1a87b5d112c97c888da7db68256c26efde1e41b5832e85209784fbb231c93bc810363dce5e18ed8370f9f0c7fa0204243a736aae65c72299fece1b1938c0851bff363e5af96eded55d2299a9f880203ce848490d5a3d3faae2000df7266ab3a3acdafa9d928f1f47129e31b07e95b62bb1575027bb987ce81393da8306107758af44c6e16ee450db4648bad36c25f0751afb412451555ed92a93a9353bb97bf8ffa65f73b256f8431d15ecf4530e131f0908913b22cea02982a79cd3ec52b68d0ab447569e180cb644d32796ef03e01272a15ddd69a7e1644085938937d4cfc75646e7e67096e294e4c02c450bf4662608a5b55e2151d4cba54659269a4d7eebf5e102a33e5a576e18933fab4f9eed636216e1e3e0e55ff61696cf58e2876d467bc496ef362f2390e33fcf2770529ed5890fc8205dcd027f865af9e2b7f7fa645d39beaab1c598b019da7d263484583270563908581154add0e416c739f0b73d78b8469fd26d91d9934d92fac5410bceabd205bfc8eeb2f9f32b0233bf7baf834863f2b17cdf6e159ff70222d0442349f9a45fa490f6262a5762d3fb2d41230388e99b5ba6b1a7cc235aa8efc5199b234265f6057beda381895a3c35ae0cb3f7a40d6e84fb904e27b9cded767a4bd0d148c0b19a9e6b99db2d7e00adbfbb42f05b89a8999bc227ed38daff2f882c8049082aedfe2177ff91b90764ff9f032f728a62322197b85e71aeb1929055a0795f98f8e5818cb5886823abb4f013b722972a66b22a5cdb6ea414798abd1956806d6abbac30c95dd305a1f6990eb1c64538a040e02de92acb6fc8f64984e39d6cae86555a8aa25991c40625e08aa80f9373b925274cd1d599e154ab4d3b345fdf955bb96dc9cd578d285d8f00c3c300091c04e8a064d8b67222f3e4f3fd36c3a0e9f6b31258799f796de3ba103ae2565f238038ff58126ecb844cf00bcc7b4e7e0c4cd1f7f699e65b8acd3151cf5a79bd48fd3d0389959b34b51fbb0bbaca6437654ff435291d787861ba9351f386f1a380160ea2207eef46df24e8526cd3b0315ceb9dadfaeffa9b5181e1103be8032580f0689e9f551aba1ca7a5fbb70eb322387b1c9e55ef37e1e1184b847fe30c2d8413e04e849a996bcc3b4b41a4f3f3c9953887431bd23d40af29a802c291e81dd7b351a11dc6acf2b7c5cd3ba40218ed4dde559f017ebef571b5221de3d16288619089f998e5f9c7aa5846d030299572f6
8a9b09083131cbaf7824042fc2a2a55b5af2a8a3f9f1e871682db7701662d5932bcd09fe32db35f87cecc6abf43128fd22b55dc31e1250934761e145bf3b88d07ab9405769661b9c6863b744bc338bf6c49271ca632bc0efd75f2b16116647691a03fccc78479528e122efca3679e880c941a5b1098b8e1759bcb62138aa3b9d2575b33216a51a9d41f8e37fadf6b3ba4687f854ab96c6bfac2ad1d554842767e6e76d198cf89b4477e3dba9c4c2abc646e9a199e53e5e9aaf05c2ffcab4de7645c57aedef3374984b3cf9016036e3251ee121dfddf92b788a29d78bef12322601a150c394c53dabbf2acddba8328a78b0117736b9dd2fa32348b372834bc12159750b854e6d61eb560d37e4e74b2d9c5c3cc566e4a0d5e02903e11d854310101607227201cdf68a67013b69375b9a76d40bfaf328fe4afdefc571aa4860dfae537d3fff1cb0d756b828ec895452ae41f5a105122d03487047b59feca39cc8e7af315c3e21f1531d7ed823ef171b824c51bb67d482ea6a880dd51f268ed663de33db9a1f5183f972d26af7987797cebbd624cec6a1e229678e4cc45ece2b03ff8b3e7d913b062094f0fdf92b4f12cc24a4e97003b5c1aeba00839da50efc28fabb7b13be125da0192e00f948f16e34f485e8e5f9dc361298809da8ccb29293cec2ccdc10d9794bad599a65c08f1492228ea041a5dd4fe2e451bae1dc8fca6b05944e66666d9bbe48eea7c8967fb1cfd502a8faa51eba399d3798b22ad7b1e166249f5798a3244fff882cc57305a31ac362b824e91ef384bc57a3a63e8124a878cb730d78b11e6f00200ca9420cd1c26f374c1624f9e45a2f9d8958151fc64939ddffaa3743f9b3c0944be5628e49f823da58d5552f0a62bc4b35483a51ecaead1a5bba99afb7e3e675d3801457b6d38694919fb831a11741d4b4afb6d400674875177c382cbd7e91ca469181925922c791fd08e1bcfbf5f8f41c140a91967be727a1b5e84b4aea90ddef4dd41ff67cf23c612d1d4aa175f9d6e42231f32c0b5a6a381afc9ff7d45390631d56c3654cda56bb2187a7a039f7d72b70ba45b67b780afcbace9e78935ac82c64a69f3b91b1ef35e74ba5cc366e4eb1a7934b5e27159a7c96ead8935285d930991261bad7ff47457e3591cf9cda2960b7b48538c91c65b27bb9ae5c9f43e6257f4e700b9c588a43a9c756e1c19249d37f2c8dcdf31ce542d164915bd0b13cd46d40614d63a575a2658ed719dadcc476686ba59e590e2afccad6232694cabe869e8095ec6282b8144ee297fab5d2ace67ec97537652603d491bc6808f8cf69c03e030983415dea7a29a93b752ec5476932341d5a40d1f9ed3b2ad2101874d0170b2b2614e3cda38e7538d3b5d3f766fd15dbaa83d5b0ced6807ea043134863cd7d9ac9c2e8e27e38febcdec85e62a055c2eb82c665d818d34c5c8ec83429f5d156bfcc11da20627bff6f8f4c3394dd66d272a2c8364d2d02f4faa945e5646e47f998017f52acfd80767321ea2cd32282fc4ca5f48999cc245658afdd7cf6748e2146c8185b58bca65759089141f0fec8020f19dc1131a6f0a916621429b03a116afedd5da2f5c5dfe8f01fcbf90749efff44f242b38744cd3b09b22718620889e96e0fc062bbc74fe6e9caa7e2ec6e64c420a43e642ef5c68d000007fad5de55288a9b38a69d721af88aa1c3389559c2d65dca52bc1e5d94b26da3ee35c2112dcbc17e157e1a2ace436b3435cc63aa19515d18d4c1fd285d2825efb69b48cbe6116d927d096a416d8eb220b784e8fc1283519a310a5df9fa712b795c5a29a931d65444a48df7d521b5095784c1284609dce85648f902c118644aba7add10832b3af2691ebb72ae5a1f3e9b1e3ebb499079edc50dd9bde4187c156af53ebc373fd8c0b2f2cedf55edabfef8e8d45421e44866fd449165547675f6d19cec091c626ea24aab8d00668e3c4ee398837861e1a4b55c04723d725bc9c0837250d338e30260ffc942fbcdc303ccd84f0141c3c03e6fa7d9dcc86e4308954569ebc812edb2352b4197ee719e8de3e1c29944a95d82b5588f548bdf447f8ae671747469c60337aa2df731c1c3238a867dcbfa397a98715fd824a817f4a56691c9ff8aea4999e7497a567a127740c020a588482e49b087a4f7a40d3ed229f6ba1887eea93a168dd5dc8ef4d823a4aca0a722a192db66f3af857686e5114b871a70f9923f523502fd39159c91375037ff8d71430e9906e4753fce89482fd9c081af67a289c72a42afadef58f6e3d45bdec858de12fa2b5a8617cbe7b3a2493ac62c12c92578a31478d765f4e053e604430430c96fefd5106b80f8bb035a89e594c8fdf1bf5b7aa535afb16fbde9ceebabaf457355342fa75588630e1f241bb2a9544676d0e40707880231996fc69c10268659e99114fdcc113d723ddf1bb5f0f80b73d455135c5f12fa85196bc8349e56e079bd64680bf57a5db53cfffcccd5b29ee0d38322e9a731c294ecf7f
dde700e74b56defaf693e25aea94b419f7741c71b7b679d57d34bbf4b941627365dcabaf2ade0b07f98e0df91fbec898a90479bd66a34cf5e9ce4092da0a055297c80bce7d4a3ac91af4dd6be363dfab95d40ed9bd53c2a200798627ea910f6d7e1f978069863b50c19c573ba0be952670e4db738f2010fe84f70ff130fa00868aba1be885458d30261f95c72d9270c449ea07f2015b4f5aae13e2f26f3a71522992504d384468239493fb30f6d95883aff38e0e96c7db32ef4e1992368b8fbca22139a7ea8685a184b044b1968850fe4bc6dad1390764bffb747779f3dd9cc1b6a98897b83d5b6f9c9b9648538f8753588b710a7b3b1a066c80bbebd59830002c9a425b3e0a493c19290800e8a"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000105680)={0x0, 0x0, "744fc98512ef35632b74044d9f0aa647a109bd3f720e020fe82657832f04ac8412d37276beb813fc4ae5b411609f3bf20c2404025642f87a82ce309e5cd388442ea5342e7ce658ae1e00246c61b5cb989e72809b53a6f88ca10e18a518841b6399168280a719280fe7a6506481b5b860c8e9bce82a11a708f3a7b8786f21a24f9ecf7d22aa5c367f56e0d0c01c59653c50ddce45c35ac09e052d0aba4b87994ff8b57e432ff49bfb6a9a14c37bc8963910c5fef31ec1227dafc77fbd77a04db69ce780e21783678d9498411de51b06154182ca6a6140cb9291cbc5e18d720cb3da20da8cc415e81c5b80d8097e7fcde8f4a31402f2aa1109c03cf729bf05cd24", "83205eef629aba17471293e42d97546dd1397bda378823d6133e66ecfa40fc1f6cac69c31822da88331be1e3e3233ac3ea8e0cfbd941480fb3c9c81507ea88b02eb47b189ba57f1451d6d4dd9e098729e00e4a20f78b5cf91c5ac0fcb4e80eb97477f09e6ceb978351caf24ad9e1d4fcd4cc54da7ebaf3f3699a8dfe6cd0c4232d9ead083f83b9e9fd891de827fb3a79b833f93aede8abd122be38f205d71f41f8c3dfa9c1e0ffc75ff4311f92fea58e5e918d34014ef45486b1ac418e53246a45c6879f9539f07fc3da8c8050f814630f85f6f4b14c89ed02e0e923edfd395a76292562533ef373d5b59513035ab7c39de001189236d8e7dc1c98e717509ef3bfcc47cac73f296ac92dde5c1ce4771d1797c1ee2c5e2a3ce9e2a354c4007b130d35460b2b2fd138ed3f6e1956def838dbdb4f0bf2add2bb237d51d171a123705d4721b423cc1b5d29d0b005377fc85f027ec1f7c95b3a2c6397cae951eed30d3cd044ad4b6c90a9982700079dbda64e2c8279ff8792421bb35676765dad534aba8cdf5125646f055409d6800a91d5c77a6f97288b0eb45846d0116217cc3068f59f8ba759ee0d1833d78ad1907757bb959ebc6c4acff9a5e0e30f95b9a881263677b99c1ca79e59d63c6a0c0977100d94576e8f933df168efe57cdc1e1f76370e6bbc754a921d4a095c452674cbcb7f1dd1eac3678048aebe5d4ee50cb8d8fbf3927cfc822c4b9c62a5b3d2f8dc4f921280efc4798fbfe6e16a7a95f804655a349aff4e83c59f472204e8b89646cae6744e01d99df7f5b2c55f4c22f377b985fdf1c36b677286db3aa34b39c615b5256307592a7560c3441c2e9ed460c2174c80606d23918473813ef77bf880a65ca446b8aa7b356e43a3e8ff13fb489add12fa96459222611b763694395a9a98713ed9c687b9926e7322cf1d1edd6a9341037bf58832cd0a23052e1baeb06c9ff610763fbf8333b5e72972f3d33eb99f31cb32853c2858e2534b6fd22ee9768ea555f38b5238570f9b08f3643ad7ddce9b3dab4160ad134c26eff10dcea4d758992e6a07a6aa5df6633f08424e4bfd930cc342fd7aaba46748a7a2f07de1e4b146378c80c77d4a7781d24e0ee741251c469559992085fe690f83c06eeaca8e0c6874e02b8eb09a0db2a2280c28d6b6d0a0410f9ab05417906aca685a734f7daa3a5665fe0956a2405a62f8d38b628a650974f2aad120699eca8d72fb8bbab9d8df2ee4414f9a82bbc15c63e14c3ff9fcb969d98dc3d6aa07a630e3571869d940d3b9ee4aa2373ff41d3ef48866aafcbd08ce86fb7bab86aecb9cf37f23565d33f243823dec518e0737879df904ff9e3fac32e1bbed0befc35f805072fb71fc5fee5e4dab85c4bb9afd06a1e6488c96cad860e1d2dc789cc715f011c9e44568b29516ad426aebbcc7a46dbf036fe67108c08ecef6afff986b0e96169313dfc2544ea262ff6ac57a9e0c288bb285378965d77caf451de325e9d0580eb91bf1b93529dbecc729b3c2f0d2fe683d4b09ea044476df9ca0d942b757bcfe6e329ca1814fbe13f95e17d5603e2828b7407f0755a52de9e1fa8c83edddbc08aba5833edaf81bf5e16f4ab02885acf7ed9083be1e3314c8e38fd35f2203e3614c9ae285c3e22eb7db6660cd9fb96c8f2f790be092c5d1abb1b509aac5d058e13fc22dbfb1e26bd6
df26f10c4af2e8cb91548419748073a4f0617ed60189767e19f184a6fbe4f3ea8c36237a3266487f646d430cda72e9f4933ea726d610349bda41e2240808b227698c6ee710e9e40d30a74fd8a9b5110b357537f6e0cdd3886db3db85671937ce8d7ae3b58095808aead1cd7ff42081289f27c2a7cee97bfaba9cfbc52fbacb08027e74f3c1f2c4bc4990f8bf5573d51d4d753c00e9dfc30b37e122628333af7d8647fecfa6f760fe4cb92f30ae4592bb8ea4037e1e08e118a69824304e8d32aeb05f450305bf2bcbf0edcb9f857051b515666db4bfab451be1b71f902692133f56932ae012c42cef892250b22605f1c967307ef1386a21c7027ef6b61be92acfb508574a0428c82b6da14d92c826d516a8008f8fd6d765e4564c0d5cf8a264b5f38ae33a975eed57ca58ea815f64be81ae62e12a2163499f98ce4783e093603396ca7e9b431404bc01853d9f2cd68faea315121cb2addd6431870483cbed374ff887033f2d676acc8c6d521b329afaf3bb9969a61500e5f2855bce2c6d526e2dad63e35da4d48b62434632c59e8f9b9de47a1b3c08b1048da18ff8aa556b211787383203a4569ec77b1e522a231201a52d85df69dd3d0df3112c7c847f50a79126531fa18b4793949ddd228908cb3d78cb93842fd45dcde4e7e1a700c9615c19dec9295d86bd148366eba223d49d71b0e02fd6e237cec29a2226c3d201e419eb277f05de306093d80782864c5c49d30cca418b4527c13174d1f0e0278ae41cb785f2b0b81583ce5275da0a2b8f0d6b4745c6ea053d7abf39f2addeae638e1c99e80f3081e48df27918a4aed948b13f11e9a075446b9cf6437f2e45ef8298ad6ed494215c0f9b796eb9b5a079033f9128b0765d04c9a3e583bd11134dff25858629df139c0d5d7f4a60c868a89f5747016b5589b96ed96f66a3256a3ace50b7052952612b25edbf3534f068a2606777568cbf2f12d177df0c7b57a3b9435ae08f20103b5ffe5ec8f8c5deab70a5007846b9e9fbed8b07424e3099a25143d3d33ff64087452198191774e0a1fc749e7ea98e1bc9123c4ac94c64894d2556831af7f8deff7dd998cc0f545156711894a0a78f2afe24e6127939712cb3d4da8ef7836c10459104528d22619c906697dc99311df3a76f645b9fb00a946a31bda34cc1db70a7fdca696a495c436004b4555783c9660168eb0654f5f70f5acac37380f41a41ea796c34c863e5ccc10d1657ae556e6423f833747fcfbbc611991b67f485b294b0f563b6fbb0dca89273b2bda634e90b0a34ff57fc1818a74c483cff8addcce9f1ccce729571ebe1c597115dfe5a1a3c6f14d80352f0bc3973338f98b66fde90fe5c793b436b7acd72d5543e4e9c2889b75060e4514d53930b19c3132d26aa9c469fc216698594635f40e1df71b7ba117c8ff62526baa66c8af76795b605e2c01f6d68db11446ab6b470375829487dd848ddbae78bde3cd0d6d5c2c025c6fbafcc896b7aa6171693b5bca01706c217f03f1aeed2377403099d59c7f4f578fb46502d2d79de069bacb73fce2f542cf1b252dfcb8d048c6f201662e9cd407f03cf4532c97edf5c787e5cb71396804b36a686123b9ea89953c6f44bf2320e33481f1c387c02c7f4a9c6f2a1a787af3a4c9d9aee6f23f0ee5ebb56f501ce578e43198ceec2f2d015bcc68195f97b11747e412d5d69c5b614a030d0472fbecbd3ee3c987eb051720ba55db2767321c067e63c95c7005fbf017d8d1b71fbd9c4777b3c26c4ade02fb116bc3cab700a15987b3356b479317c0dabff9e2e9198e49b0a1c3da27db65a5bab0e0eeb5fdfec43c411d3f0e548e88c9b0d70f8cb18e74a58b838473b83df51bde8417bd53b07dfaff69c5791aec8d7b0b9e5f8907356fd039e3661b207e9ab54a50511f8459e8393c4598d77f7c5428fe9d20ec2e9274c0bf12f2c77989c58dec6922ff68974cfaa01f32c4442b850882b5236460a1aec5b7b378a65c8c00a36e42c8b288b642c27ef8e80941bb3d18814f7b045f4488baaf39a6458da6c4c31bfc00b4b595220f5458278f416e8c824179144097a6f2bb569bb9a9131289ce1613ee79ede143a371234ccf32f212dd2b7f7cede67a673fb9731f87efc109caa0f00ce2fa10dad838571251caaf84758630478aafbe8e1816c0024270ea368ef7096af8b90f2fed7793016a5cfa617951e429e83154ecbd3c36e4d2c29b4625a0ee3c43619b410d40f09982c8dd033e5c901058f0261b17bf6965e6dfa335d3a10f9c6d8022b110f7f2e2d87d9c29018df9d6da7ae4b8144b232c269893c2ff9435a0a66ec31a52454ceea140377d929b59b54ef1c4f6f7f768b5fa7eb59cfe33de7f24d4dcd31dd9b96255c9c3cc413d33df4801d035f73771cbcfa72214f7e4053c6df99213a3efdd266f15c67dc6c4821a9d88bd855ef3a774bf7d373604680cea4db89c5a5d13c7bf63839cb7c62d5ad3019ad115c59534bb0cfba24cbf3c4599e558ea2f2119
686a11b6eea6f570105029c863051c70f743318ecbfb40edaf8fef3bb706623aec969e9de234399c0b7c66e5763e03a70f8e06817db4c744895b5bf447d3507688b6fadeb2b31ef7680de9efa91304dfad4b6785b3ee7ef2174720fa49bdde4d1ab0f90ea08de5cd83e8b5ebc9d352d4c21abe3e2346d9f27cafc4c94f10f9217a3170a5116d73d693c25234e368a5d5c9b0a6df35434c16a31718ff7fbff651bcd1c0d6fc20a88c8367866a44ad1d014fd482d4648916a19e535391fa19723cc0594b3bfb5b138514e16b007e62c2c6e2949ff8eb546a2fe7717334c5b8357e367bc8daf24fb8b99b614aff676212109c6e0bef94401608f61d5bc7dda384ce8d4dfcc828cacdfeb749f929e1a4ce5abfade7b88ba1c2e180c505cefa726964987684a00b5e4cb9f1f017863e626cfde485b9b127a04a5d7a756a483929af26f3dec108944ad046b17741d66b779cd3860096cbe88d540f044af80aba814b6be20f06170a2974af49a1e6ef8851c4ae4f6d21e3453f74ff388c91b51bb75af5e62c77b8af9ca8be2c54e129b5fc4d30165cf41beb371e6212e723fd75e0a9a66f4c7973e2739fec26fc074f1a877f6fc7a3b1741ccfd1a828f0e7a91b55950fe418422efd6607007878ce385a0e54a558e6f1defefaef5925bb403e2ac0a80fa593e4d7667defc3e53673483bcdaf9e3420ea7e77e0839738b04617d2754bcb72e237dfbbe57d59e83e1f2c0dc4db068a4ce51eed10aec33b25438b3d95849b812ec3be36427654fc2e4db354597600e947e3cbd60fb649a43fb8b3e08b8624bd48195678fddda5208c004b6dfebee3d200ca6345538725d0a6c02bf58c80f53d97c77562de0d646f3e136b1fd6f242af138dd2025b07142f38ddade5ea8cb144edfb14178ecb806b9f7217f31f6e6a83ecade57d66e12219785928fd1662d5e7112bb6cefa3e76600cc6dd008ab056e1d6d23d626175b73357f640fcf8d48a10b0b26201f29d6580dedcb18c7e3a672aa2467575c76873b03d320d59533fded6d98c033c72c1831b77e02396d09671c05c732a1ab28492dafe25e2f8222f6c700507e6af106d16fecf1a9ac4ef868b9005c6ac58c2871bad4bd019357ff1ab6fd588e069f095b3b49625b1fe005c67302117bf0290726dc67f53137b89c7ac7ca222e60b8ec941baff3f1025f"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000106680)={0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000106880)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000106a80)={0x0, 0x0, "1fec852be121b96a5fbfe99da5a4bc3ecef6691cfa4d77f71d6a598f68a100e6c424b804fdffcd7cd10fc151aee6f7f42b1c2809bfca3b557082ab336f0e4f3c7cc4e7135d909b00b6dce17430ee2c352104ac341389c4d9269316af98e9d9ec3616222e1c2829ea05aaffa48a7238f97b45a4ab24b3a4a3ee1d243e3c29178451051e1e96791c18ca65dfbc15980c2481288eb4995b415611f1e3085377f71d6794bbe7041d73bfb1e8e163f50a41c7b7992dc8a23f30002f44edfee88a81590872129892271fa08391411563f701eb3851bdb5e0d0d1421524bc8213d2f6bc1f2ff7409e062388947df404382885fddc759601785d436583183c4da22190b6", 
"a418ced7323880a007ec84d6c85e09be9ed2f1b170149d3e7790d720697f6a5bb75de6dd328b05e30c0931932734e811b223f4e35cfde37f72e1281ac47243db7efd68cd782600571ebec44a9dfd6e9f2ebc06acc2d7f60da331dd11206694725d8329b7633cbc968d0e77c508394bdc40df7e863ca053cf18b5f0bdb0514e87055037d66bd8d3b739f73845bc073752b025a471ec9c376960f8ec7580a8149c348a198649d1e408a8620aee1331783d228235beef6fb22e67c191244117634bc9b3fbb63a7a1a674c0ff52781ce454364730eeb097062d56ef93636216eda2d1b82ec243f34725d82c19b521db8f844129401f9fc173055cd9ece6f68a36f601e31d8276b12ce2a1f86b31ed95573d1ca6a310141f31d58795cfa393fbbbbd976a312cc54d70ee9a718105e4d580d333d0cf657dcf1bf64b595a1875dd81841f4b80d285a49440fabe953bf3d31d78c638393f75a8d34a826088a336a89839a814f75710b81c193b247ed1bdf17d0b16caafb20d4ce964bff2f0f5d892df150da40acfa584b62a57efeace5e8cd795847cf8c8c46ee840726484774c181ee4ab040c0a428c87dab994f242196855448afbe4ff2acdde893a1913cf7252954a6594bac20c39a6f56b8d9f98b9f86bc0e6f7d589ea5579f59a376267a7986ed63dade8f26b66e4f1feb07065bb84afc8fc676c4423631177bb0f9b8b8bab174df493008c0abe5f9944ad7e0962602d137035d62d957b000cd12aa080d264b670c63f289d184d23532bb705ee572917870b9467454e1789491fc790fcc7e7d4f4dd143307eeb5ae5c49cde1b9f64d36f19b952fb73ac738691b2cc7909b929e97ca5325bc62fda2e3c95aadf52927348216d7200e586a9bf38248a67579b4a73880e52882eacdab2a25530c23691723ea473ce2408f83ccbd486a61871572ca8f855af9c04b24645a2c5eb92e47230201d06b7ed837c1ed520dd776045882bf7b1ccd875b32e687476a21856453070069586d8d2e8c01769870ca997aadced0dc15fa653c477790fd851dcbce381b0d99d897e507cc2edd7b017d77d8603cde6311adffc0d131a39bfdebbe8ccea33ee96627feef8305d136155dbfc21ae894d9aebfe149c5371184ddd08d289db6b8fcc7f72dd9242e7a8a42cf2a77f534cbf94f3a56bb48854d6165298cc2c8576c262e2d23d2c961cafb3626b4d8343d3338d4b917967857a9da61866eb535e2dfab4e6d6339cb94c33a8988fe41f0d4d4dbfc57f3f54bd9492675134e9b5afa97db09404a9641f6c3f6738f7b71f0faab4335c81f77f513c4c77bdd96556a32eb80c7ce9a6f792e7ef6cab4f1ffc6ef5cba16c7c29f9043db28867ca93405c59dc635cc8f94a82c4b473a5b176e07222c1e0d632d32a3423bce785b88e7f7856465591eb3399b2fb80362198517e656ffb89de49237844409971ff3bf9140b581596ef69d1a9ebfcaeda511bb20df53fd948e20f9fb2929fb3cc3b4f679ba86a98bc17449db9f3f2cabbc303824c0fc9cf69aa0e999401920694ab324e60c950dc850ab7e7a2e5a0d0860f0b5187def9550b92eb93e81190b0f98b43017486140a37ebad6ad0178648b0502073119fc6395ea66fa8b86eef7fef18f67556b186be06063ad420cd51258fe061016733fff74ff4c48395bf0531db4653961cf27ec36d8557f4a3f3d8cefe4337e21f7f8cab5e3ed7afd5d2c02484178ba2b170b71738146d61afa2c376d712cf212f8acfe1ebb6e4f6a62534670aa368e70853b1c634affa3bcb6600f7428e9e2ae5ab214f9b042ef0e6dff89e4621b47dc3ae868c0241cccc9f971a6076e96081823f020ae26f651d00115f7250f3a861e37efbd46020b4f760cafc26c075d2fa4489b62cf7636b08143203cb1c987fe703fce86658e8fa8b5c81d8421d6c29b9512db081f66507c5fbbc1e60ae42f8d73bbacd90b88a80e4c44625f106d58764affed3aef636ff4abe422fd325b823a94f7d2e92b8d18402eba01794abe1c076c9a3701ab2ac71ee5d75a3677727ad8d86ddf561d92232f01ab16eb8934693d7d9ef260b0ffffebe7a1e907ee7642f8a414c7a419135727369220568bc13fbaaafffbe76ddb853c21f43342bc80bfc85d1d8088075ef60491612649c9f015b87f6b207e422b6a67a8d08b377b41bf3f4cd14642dcc84ab2582cece644a9640b1ecb97a27f97e6ac5d1dd49a3c8417ca0dddb1c6666aa68b0a0c5c6d217016988d3063fdcaa93f3de2c8386e4ac790352729bcf0bbde39704bb10b339edb4e9fd4c73f87245a20b73cf9c1b4a4159653dc4a94346716036b1b0f76bf9b1caf406cd7a941572c72a337763faf12c563c230d1a9790c63ad3b83eb10deb97293b6fb9184d983dc76a18037da6ab40de2f2140197ff625699414d4fd452a778278346c803301f3c5b61a02aa626843d748075dedfa5ef6bd6acfd6006741cf07d8412a0786d9e701dd2be258a1f481b389525b36bdc47c18
8c748f7068c30b290d63b8d0cf58753973f6091b433fe2f397777a7576756a7aaa4a6d65d7b1bbecfe13745abfc36ca33381f843f2527bd538f222a3d1b23c67e01a68955420b69763ab2badbc071d4ba10ed5d42dd0757fe971dc85cd748ca5cebf452c7ac9724aa3e9fe93ea6b0febd47de3b4aef95aa131adacd200fab3126faa0aeafc9233a1dae87dc7b326b5d67e37c30f69bbed592bd884c84ff43626bf8afd738aba67ff5081adc3468ef43143e8eb417d298317103f6ca17fa6803752c69adcbfc4186997814ef09b765b2d2e90d1f25ef46c6f5ab4ef3910eb8c74a97a61279c683dca3bf806319cf094f607281106d48dfaf2b127be59993eaa6b03c9cbfaed4dd6422952f6cc507071ee06e9ea33d7e0ebb9b79441ae31a97aa72a8ea877d4d78676ca7166a180857569392568fee3be16ea77405fee9b50f8df179ca3feabaf66777fe8f1e01aaf6a9029ff5e9cc01c2cb21d3484332e31f77d95b8213971cdd2d007e024e469f02a321f5cc8f8a42fe5663f71fd63c1b6efba10574cb119f88f9b521e6626fafa791ed74088548dc5d4af42b8c3992d1ad01e861efc89e281bd56e2fedfc4a5fa872a05eb0d845736336b52c31c7dce03728c36fff99e7a6add6d155db36414c4488cab2262b626001d7da65c2b91370cd792dedf883cec0fbe6175272f834f82b3b6a720dd5b19c9be7aee49129dc1632a68af45da1ef26bb636626a3f0cf1d7061484064672a62d2c06636a6b76c276d90502dae72f5d4f6364ee231475b0246f31c22c380ae017aecf7e275f60ff7743df380f25d03fb2c6ee2ba148e5c78352d60bfaf545f32d62efcadb1eafa596c7f704b96fc760cc463a16d505548ba36b6b739ed4657853c0694782559a59d46ad57d8cc2a0aa5f1ce479b3ccbc262e5a223c099a3e0495cec88d74c138d9366159393fd6dc75435d375b6d6680a14b66722d5bf22a5a5700a6c8be83ac39319f56e94a18884eca81d754505706e8deee460919b3a798d40b0f1ac12fb35fc8d6b837b50c5f4859a2f8662423dfb8ede255c9a2eff1f7d969e93afaf07f7785a97241a594681580c80ffb9b0c582cb6956e509a8657487582c775ce0cbeb2720e59db1ee30536be96c488b15b06ec1da40896d3cd4523cabebe18dd5d25613d9919ab2354d4721ee2eac78458fb51946d4b084a15fa5d3aa57716eafae22472ef559fd6f4e292c96f8c41c0a76e1f1685cde0eeec47a74b85fece588436fe064b95f91c16a83cffc98adbae23d520ce3da3fbe68aa55fc0499325d9a2670d1e0e72bedf0745ed6b0c4db07e54dce744c68a5897dc96f9a1f67a6ce0a194b299c8fc2b4e9463f72c280787eca3ed9c6464bfb2b242bcb83330c14851b319bdd799ff9d56cbcca028705e2e80e829add8e7aef9bf099a251d09e5b029be51f9880b6973c62bd172b5304ce819be9679ffafae5e4f2aec9f49ec1ad21818db5b9e42cb6d493901c125e3972428322183fbffba7c3ed50905533b3d20b3439212c4b082e71e0fba55e12b34e54860aca814f3413a897e6d9d3198d033da211a5f7cfc1c90e2680765137007e255f6ecb92b86735ce5499a0fbfd0d2f6ad3669e5f81c88a8c81e4a960baf7322e3cd345b6e370dc23226f6c0ebce48b23a0f234d64ae75b321b6ca5825af74346dbe69eb5ff5196cb424fa72505bd728ce2efa73ae3bcb43c85e3c6240dc0c5072352d381d043c9affb40db7b7967089dc87fcc2073ae14b7552ed1cd418aa74140ed94717fad5083794d0616ef0fdd3f0b9c2600f3120eecf9b7ede8255655216aa5f9c60fb39c466f08529735cf90b6cc6f8c7195599f876d778b62120406b2a1e00b94ae54ce420f1d1d6cd35fd5954dc92e95ab3366f889e1e63a84ca2614492fb7ff5197a25d6100793bfcb652ed6c7d831518735e6cef194ee6640946c40324dd0f4ea4db71739b07de7dbe5d5ef14a95fc88461923fea7f58a683588451b423878901b25b760d421455c9c8507f082f71f487e33116069914a3d696cc0a3660b8baafb6bfc81b1305fe9dc55f2f4fe3dbdab594ef6196b7c4e2dc9c97d03d4d10f87c1cba30100587e83eaa64d4a8333483b3fe58b1a8f00d8142711eadb2a8cbffaa35c8d61bb1ae65673d83167c1cf85589b8b5107d823b6ebca17c28cca00627998ab7ca669420d4fdc246f4ab5cf9655c268859cae8b2517be3e6ae59625c2cd8084b3b338fabe7e268911e43dcd8534a83a2b0ed92fa462c637e427e8a6ad009acf57923c33b6d8dfccc1f958c848b5e8c4050a882bdc40bb37fed6c40d7e1ffe58410bb91ddda8113ac841b8eec13bf5797f018fad6607fff0e081097a45e82019f6749f0d394e3a30cc7e4c6e7380ce8d06b3062233f89ae10b12bb0446536c16cb084a4caad730970ed2595ddb4d73f9928d080277505801b551b090e624e127a39fca67695b4d24ace3922462c8025e4d87aa710375c608e320904a26c9175307
fa768dcdcc9f4ac2ed1ff0c7a755e567ea173d75eda9caa4280687bee6ef66dd90bf0d29645e56faa223f4f42f2dd531607c8d5fce04d347b22e37afda2f7263c67819f48c104bf6dab1b69960e237d9d294f63c392567da0f88af3d22852ba37804f01eb44ff533704de999bf1f77c91f8014590d3e013767f32ec80c423f331d6d1f619bd171f45e533592d7af85b5b8972b2fd1bec3dcb85d5d9d15fa47889c2efc80c5dd854ea9af9f25894f2893e85ad991f3645e79ec364bc01b231bfe073c421eea5b884d7895d01cb9a1d905e45720b0a41e04aa33bf68ac7028e57d2d94e68025bdb6f2fff7ad193c4e15165b8544cf9ed3824fef2c1ac47dfdc9bf11a8efcdaffade096627af9294f"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000107a80)={0x0, 0x0, "35267191a67174233acbfa255c4331835e570391cfd2ae33ddb5adf83337cc5ead0d631c8eb113a74522ddaf57672dfbfba8ce2d7403e1bae0b27116771b14cbd6fa984ed7b5cb00156df9bcf1e07d8bdd6338b9bcbebe8cc02820d80b053226a9c1e22618d5435319d5bed2789848a74d7cbd19882271fca3c579a741603135c5348feb355a9466ad0a80a75d2823540254651cbc35f470dc1ad27405cafb058a6bad01c11a261c02657d2a48ef1a8ef382676330882da124a5f117603f01b01e2e4bef224d8c1dee6465322709491f997f70cdb0aee7026ae47fc7a3d9549464489b568be9309dfa0008c5d9f9b930b048cb543ece85880f5e0c47a6e02feb", "dbe2697fbce38424786c3e91fbca5ef7f1b81c77f53f90e60334bf9143fd9753a9076487817eabefcf3e60b8f14385870a2dc60d7435782be151c3f9a22b7a4f370951cdd6660e837122f1907221549c0c25359067ab57ee6238df4f73f8b76e298b5303a3fb6b4b5f284df80f79bfc8abfb15b6908795f7a24c763a3fcd89e33fb9d1e476bc11719511d76e55b3635ddf4ef9ecebe34d030b9b2f50760f39d5b41da879f4c4131fb00ea0bbf2811b63ae4751ed072f79e5b017c8b43e812459385a4a4644c1698b25718ebafa0c7ab7bda12069785bbae93a0bf969ed9722415f95bd0cc056547f79bafb6d461ea2ce48efab20768499c36c81dbaceaef90bc90762d733e0de6e76a48717b4c7b1bb982e6edab600687bf1a0234401f529d37455abb7752646f8f61d4c88373956350a8f50152bfa57c9db07c863f958c34a0160707c5ba54ccdaa6db7b13bd2e62920b43bbc463d5499e0d2bdd22c59992e5427e9a59ca32a57c4ba56962aecbfb493efc3a86e158a8637e97f4aca61ddddb575d0fbcfbb264444e0a7a7235e79cb658439b8bc4282e27e8b026a344504cb1359519f90159dd0af5ec18ba2ed948f29172dc343a1ce4ec2ab640c764b464417c838aa507dcee4b5d485f6f02ba3f3ee25bcb47ea0a3a11cc85a2c47f2f9b65ec1736861683db6147622e4e6b42fd3efe95892de8da28c98443b501f7b1cba38f6316c723db02c30b5c9c699f6c57f4ec18a3cf98e3fffe48a94a229a7ac70adcd0730a99c46bb9e7244c53d99ec26ae8f341bde237e437323a285de8bc30b9db91269e4dbfae4277aa35b6efdd5889dfe58d90d518601380860a66cf313e3e7474c377a8e7f9c20a4bbcd8ed2390849e5ef58f995a19a8b10a36ad60451ea551a383bb058d4878a2de4fe5ef2ca5e8081e6a83f273dd469612031d9eaf37d4bfa91071f2d3ac9f49da4099c498ec59d76a2581a558473f42d43b8cb2e648727de4767f30a567431478af5041237d0f3d91391a2c9cd49098d4829f3d8b9e7ff7b33f4103fc7ccf23a4f3f877e38c7bb4e504216a6486e7fd76b3a55ea417b25ca60e7df1fcfad9d825ffa88213157aeae1511df69cc0b2dbd311daeac3c68f4e375fe890016f246fd763452e143285acfc39eb6654aea6c5e18d39ebfce02200a500011436b65ef12ea86f7b8a8a79a986b29c23e6831bc4525620a68099e8ff0c58cb9ebc00cb5cf20157e1eaeac125611f237e2fa714c53e8bdc2b8a5bd6de5ad648f408e97b7d798eaa42c396c79b52f64361812bf9f5281ee57223ccbcef10e425d8be4d83211125c04b014243177b6d73463c71bacb27fd0633577e2ed772c4f0b34bd593e4ac37ea57ae30866eb76b58d8d809e6da0824556e1fffd56c4c4e35cd91e1239b1346b887d942f7c2923f046a3223647f4a483748c495c210cedf057a486e7bd41531e27fa696a82538da499ad5e8259018ec635de65591fe758658fae5e81efa3a4313952a01a8eb1a7e49f3ca133ba9129be374ca7a1e9de383506e42b6f509f42ff87be93adeb78ad2e9dc3d003aa2a72fa020472e1addbf4dd57825aa8f7761bcf58dfd758601a2dbf0af9dbb4974a6159bd2a9f5c691986cb9e8e9e98e381192657beef544ca2017b87fd8401cb6a6d67666737c7159c1f1430310c4a3a32ade0ec3cbadaba1
7ca0b93d39d1ce0a2b77f5f28a3fb7b8dbf3165b3fff7c058800ea2ac4dc1c8dcd015d4bcdf3879d1b59973075538129f27371e52051abc52aee1482130808f3d7cf40a8aad777359e30e468bead02a1d20331d816d26d3ec0e30ccf341d6c2d364f845ec287b0500fa1e3c9c718148d9cb80aee0218fc9d3f692e87dbf69223388fb3f4ce7681f27dda809e34a9f95b2a5b82456f9a36e09c0b13c601452f03a104eeb4f529b36cbc998d9d939b9785c608c3e5d616d860a150b2b336c219fc2edd447238d88563883ba6b9538dc4be608b6cf21ec12a20d8ec43b0042ea2c3cb84d20e9790ae70c54b7d615c10f5e9ca222cef3636324b5e155201f0ec25f80b5d1429872b88f6fa4a848f5db4acb8fa483e763e042ec10b8359dfdc4b6518d8ff71450253c4ba91652b75869abb776415f566354871002cc59c5bc63ceed41f36d4f3111946d3e31060384ed0f718d8fca8fff62ef8895b5f7ce5aa01c57d0e46496e4ad09d953cc716644d63f4dff12703cf34556c5d88faf991f94e87ccf9f491e3f7e9f305c295d84d86b163de716d5c44d5f1119439366159e02b6b8b9a2fc5b469d41a8028d353d75cc356fde2a67f362271fdc99d9f1834343cc0d0600fa299cdfca6f14d04ae9908c7c08e8286d7295a57d2c0a69bd1d0bbfb78de3caa7f3705596a3dbbbc600d2a22cb85461bf215c48c99a39fbec7669c3bc880bcb3fe69f071d9d97bfe369a300bc9e546e1d53ad316a2e9b2f0d78204e4b54f94ebf7db318879d5d6a75502d5a3ddfd8591e85b2f908bd64cf6e0196aca64db8e1d95cea652fe18d7a32b0fd83895afc5633ae4b7b4bd71b1f1d32ce5fa7174b9913ae1d1c53824015052062561b9a17024a1896dc6d6259526f2da416af1c1e04f72d0c3bbf388b71f2a6ee57b78793f6adab235a3ae1ef3d617ec558ea53baeb216071b4d27b0c41d9e1f07e3cc3e8fcc07dc86d613db6435a388f4c9f08c4c670441bed3c20d6b8069e04d48ca5d896bfda5b2ca99e65b7b8b89909aeaf91e367aa6a0db4a509a263a0579b456a239bfefeed13c3a0648ce83615359266c8934cfe83e77f9563c2d5b5a0233c08e888099d580c9d17a2a2eb16c5c4907f8c28b530e1258792b2558aa2a0fc8a216da00de159c5ba851c11a5859317766a882589f7febdd015122ca583d2960b32ffebb0ccb05d89e5c2e29a51cfcfe30a10dda8cc1103faab8bc368030e091b36cbdf70779e5ff8644a75a2f2e528b7a91a0e26aa4dc3181688ac09b7875fce155d7d6b5a81835bb204e537bf0abd34f97b0145debcb7c76912e540e66bc4da77d3756b10d3614d6821041034921cd60db85d83a51ad236c4fa27a9af1ebe9a8e2b34f3738972a304a076412622ed414c3511a4ff58ea5cf5c7c33c9123af4795c667b7a5097b16f8386fb801b31e540f1d1ff7358c3ebbc63da9ca6df25a9570913096af2593c4096f655628f97c304219d2a1967d127bb1f7ed8c7ba341dd78a7e9ab5ad66e5181fc7f629652687a7dfc05937e36f0799640d2128837cfcfcadc3746d640c8581a00fe931f478f1e06fe3fc24fd3ddeb2195b64c3d528c6e37782e3145fd7b2295ca894b8851496e04757b4d338015ed45decfd5d04d22f890023883d99356808ce5f6bd90642d9d870726ca55cfb174d92c37a625328418314fbeaf9159f89ed28830180f3612dc2600e723883d26f38d7d2624829aadb6ed1002761a718834901769555db5cdcab89322620fbf48ab3e76a32d2f7dea64e542e2ccd84983cc3c232941f1fa1df2f280e7d82969dbf6134a1241b88fefd23cfbdd9dbfa2fd6c720987d249ba81636a470184b28c51b4aab175f35afd861208d8c9bd0cb316b0e179a0a6258551f16f6c0f9405588b035b34602fca3197c2ae09eb0480920a90ee13391837bdca26986e2935dacdb05dbbdce6a443328397f68ef7cd2a5c70f58dd470dd1201e2fbea602d5f227b851de4eb552bfc74eb7acbb4735c97094a9b0967744578e7c39d70b59c307297736ba2a98b7a80366f509d62c7f38aa3599747bbc68965f5159e291ee87ab09544a41aa5e04326084149a7f5816eb1d8259b3e03202919ab4256d0eb36bcb3ed999ecd4c1110dcad83e32825fe7cf3930afab013ae5227ecb2d3efe688c1799000fcef21740fbdc6d485c677fc08ab8aa73417176a56e3d555fd9a8a7f72a891e9c266a9b326e0f8450f8eaf4a851e253f66652102ac26483ed2471a8db915e7101c78a4a641c9d4cce7b7c713a4ad806f833bcaec71d2e895a6dfce011090bf5dc0de87dd970a92534ab7c8f4c85b6e91c9cd11330fadbf62d5a1d9309e3a3cc822ed7b50fc7053df6b93d41b783580cd171a545deb8e54ff5966d147f1939772e0ebeb89fc7d38b78e69b7fc62713aef8d7ca14caac21f54765ecd3517d7589e7b3cfe0803ebf2534d0657403e783bc7da6ea3e168a47f6655cf1342c4fec7c57b7a0de8f0763c93d259097f
00400a7e1674b3f9b174fa4b0aea5ef0544ca507e426e7b9d384bbf4e4139e962a1b722ee3752f397345c0a2fa96bdab7435d27e34b13d3bece1818c330d9cf099f430c679d9cfff10ecaec30d92a1014a3456d06ddd084e222a00ad241539e755f2a9b34c4f91f6a83be7d7cf78afaf3d1e797682dd4e1ca501c2bac292bd5bb0c39e2bcfda565a27419bf5bd81c94db754bd234c8b33149dfd23c16bbe7b863d4150eda99c8e218671c2553ec7a36810c0b084284b8a84282d2617ad6341e5d8839e38610414f3b1ba475f4ffc82fdd73031989b6a53459fdd55a75af5568a5f64484e2c4fbdcb1a05ceee4dbd23a672aa0d59acc5b3dfeded8fea320a327fe9ce4962ced2ee4351e839ca1497e549f1a7c36620b0002e89e7e45ad87db2528af7a63c4faffd712fc89a4a2df30c0956bf6016498955626f8b871a4af0282c5d5c6b6dbc47d166458833e85d2e4449b4878dcf83c5fad0cbce6081fb23912e04dd4f5180718b5a761edcd48d0aadfb50e60396926db3a1a47f006dae4764a84ac7c5bd9d9a37a4f07a57b0555d02200442ad5082699d8d4b1f969b845ccc5e5bdc9413886d8def3ad63bf618863c24ac24516d665e42dffb6b4eba1d05db5805a13043cfe3380c6700b2ccf77b64627687d16f06e8e1ac0e540d821708f15b5935c1fb55bc9d62f34f402b592435f4c49c2c7b3b97a0aa4b7dd53a8211c89bdbf37714640113af5e32ece9511342341371412a4eaa6ceac08a9fc833dc8457bb3c8750e2c20cbaf0a93c5aa995a2ca29e39621694da7f15d122be74075434a8192774a84fd7ace16dee1dcd95e42ed1b2521d6e7ba20c1dc822c164166ad5499bf05944447bfbc55b6096b7c4a23e57aa9d52378783b6edf2213b0f82ba0c816d314e70bc1a726e775120b046be2e8de1d460a6edad6148681f69acde25dac143519273a4258ae69e9e2943e256e0682b2bd01e090bbb0093b73501b526d922745d7adacf844fb0bd3330e22bca518668f27b3111dffdd0e29cb855fa4d7253604b391495d4308fd9a49cb13e056a8b006691e7688c13b602710bbd9c4ba63361589d607b9c9372c0dd313f23589e96c9c879f6ffe854bdc745a9a4ca2f92ab7a1a9a22e63ae1e9517e9efb4c6da6b97fcc522bb36179c98465087500020022e7231b2612708c2072c0079a46"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000108a80)={0x0, 0x0, "bde2cbb5497e0b9c60d72ba065c40ae36b09efc6d252654547649a44a00e1e92e9caeed10e624c375f95b37796c31a40434f77776e95e70613416a27e9f5d0b3c884045398af24240312b9cca6a53c9023c0223a204aced2523146cca7bddb7df5362e3c2044d837beb31ced610006ab95dce883adc7a1b5099e96a1c40d1559764ffd03c4cb55d48b2e05765ea10e261d4eb77e365360ad6ea8e288435ce17e03eb2854419f17c04680c019c1d394215f956979772e0245a99b4926ac7e019dd8d9f8aace05772a7faec4c9f415205b1056fa29910a09b000fac72a2a9f0f8b612239169e81adf58a90aebd871a9bb85f4e49e5aed6d5a11c1543fdc56928b6", 
"97ba3502523e82b9ba8801a37294fe19e0c7ee69b16a0de0d59ce801d329e6bdb73be07e8510f66e2e0a4ba778bf8c68273325f627b67466d6360acb6f74eb689fec17e5beb3f4ae5945b68bb4e199375671062bc73120dadc8d0abca6656d5b1293648e4ace188da7635f827ebb4ce0cc8f57cb5bedaca53d287082c0b7164919d11d84bc287816e7c27553d3880431d1636ab80236d49625df0a803300df910911a9f88538544780b1873f385f6b16532560a8237b4713784ec288bbfb8209c7f452fd869459587c578faaccee5f41ea26ff1657284f91608149106dcd64b74663a5a0dde090d46bc6dbde14f5ec22a0a786ea3a0840921fc945b7e65f93b0a1820e5271358fd5212f058c805467c94410b134eba7fb4afe632dcbd2044079347bead4a493545e5e7398346e79e26589096794dffed97bd4377f4baa78e1738b435a3767b3d6e726ddd0510d2ad43457612c1a71943eae1bec83284896ad4f80d57da38c9e5cc040a098ee27b054332e64f8a9d5af685d2eb582f16b48a91eee2fdc8e2bf0a1243c0143b1c60f2f8eb8afb92497fd939fffdd92281c588c58ca81878fbb15f0a03dca1b9ba3f42319b253df7c4bb77f21ee3e8d04346e8da83698fd9767d698347579556e1d98e305b68f960807252bfb1d8a6ce8201bec6205b57bd4a1852dc5f001906f2dbb85e2ee99c3520e85c21817c8a859e1001e416440bf4631bac95954f35d67177cc80d28cf8454fb0d7eb6819f30a3655a4d11974988dbb39a6bc53e138488dc29ff6a0a3e849ad59e5a7e88fe82239691b3f8040bb75c1e6fac9603eed238abf9c1c6216118fc547cf7d4ad900587220da56bee8c647f59f34df9371d8ff5bd47b08e176e3f2f0e9a7e315d29a26b170e6d0b6f0ee27dbacc1e9667795dfab0f7b1d151132503459f57775a47ccb325b178cf7acf9478d78bacede5b568460ab68fe3cad0ad4ec0dbefcc2d4886c200115035ad17e22a4a634526854dc00bbf814f59388e645885137cd55de6fc152634e2fdd7d41352307f3b13bd92de4071254cbec5706e7e0a761c52a5fa81c36f0d4d8ef39792498b63cd43c99e07e0b84d167267c4e8e2c278ce41a8177c1962998590017acc0ae85566fb0d1049fc91c49b38379760df03022ccb478724f25541e145cdafde627b6572c64564dc971983b3549f8af5f2655e02cc6048a6b9973ea9634830b38a31e3521803b31c86dc89d7ea7c9fc05026553e4c5c64e962fafdc0e4090142e3c263f8dda5f55d11e4f973f715d6a3c27c9348f2cde6c50773926a3d14ab4b8db71c877e542a98daddfae5cf30a7ea060c163ac7d968deb1ddb70510b847743aed1c9c306dc536ad7355457e9aa5c0b8dab3c073dc073f2b54699f3e88141d8f492637b8d7a197c22555c29aad90f9aa50c41f3af04fbafa98fe7d16404dff2a93fa16591dc19625b7922700e1d314dd010a85bb8156a33c4f46f64d0a645c50b95aad44bafd8599b1469e3c1f9882d42f145083867af96a187e26a0eeb36b016531c81ccdd7f751aeda23cfdf9002ede83d7d8a8eedbebd9737c518945fd126ac2eb856bfbeb0bd35604cd132cc84af98b099d4b8345681e0299b6e33047a74ea921a7c60e1ecc68cfa67cb238e9c18821de181db2719f4a298f27802cc32c132282a4be1a01e700035598c9f2da89f0aaff0350e5ab1bae78365d45c952744fa5a0f71e5695e9bf88e461d7ef87683d46c4ef9590f314dbbe061b48d7d237a84f94927ec3902f023063bf152ae18632fa09d92ffa354bd739e253c6a4208d036fcb104f05896c4b128e147e7ed9cbf0683c744a1a383963707c8d417c4fe5f4146310b09416262bd9ea02edb6ebde26336300a2ab713acce9344761132eab3546b270e1664ba6b8f643ceb115c31767881836d164f9a665ffb69166771d98d50b580462995ce760aada92cdc7d41949f7b70606e3bd71257baf04e4cffd76d04feaba7e40231e8467c61d30593e37e76175e610384d342076e3f357a01f1177df5e327e6b7bf327756ec9203f7be982034f48a948859b6afa998a1bc53b8d9825dc8f4e82be08d44be6de2731de64e9938a66cd5cecc5899b9724f1fc1db39e6cd49f3efb72116472fd8768a303f9f0eccec170eb24176838e43509d9d3ccf7fb61ef2bd987d32b7596e55e4554815e670c48a6cce09747cb7389a8e5b2889dca2b3340ad751f7ec7c86637ad87cd81201b90d13496db6cc5ddc2842f9fd06099bee07abc1793901902ef1070adc3b7774a5175dd05f53690c7a7c391086c5708edaa12ead7452f987fb4be97b4fda97d670029dccd11dbc00fbb39039cdfbee4cbf2b5ef4d58859f6f70fe6cfd3078155823122de6c49666c1d5086719549af3f332b38fad6811d2a3d1fc57deed955fa7f7114a47456693c6e80f1d51e6bc7d88bb44844458e16fc7bcb6e400251891c3180293eadc1b7df2da1e6ee91d0e0b4b4064dc2945852018495de
bcd95e56c84f251b08abc3ae39a8da42bdf171c83818cdb72d2341abdc1ae37e453c477f70eb2c4c6e5265b51d2dcaeb0b99436ff6e2a9f4cb6a0202c2d4b69205defcf62b9c0c443b250dd84d3113e09e5201d25c29a647153a91bc074e2fd75a5c2ca6a72deeab007eecd726cd983410da133728789570f495117b4b54626ad5d08c4837d27c44b244ac5ac25c130fe7da9020c9e17a43ff2f8fbb5c2ed6214d1c30311e57e9e272a0a41c9d6c0841d1822872a8639896dc49fcaea3582eacb7e70dca33ac4a7ad361357d8640a5919f0a3f8ed641a807e21a7f0413684bfe3369bde73839faf01f569d49074ac8ca46ad075035c416825b27f33e19b23334ca6d08e83e15b88a20049bc79236ff97cfd3575339ef35dcda3a231a9acbabd1015611a6bdc2ac5611e4d3eb447ed636361ebfa44764d3572dfc972423ffa0e7d159337b1562b877c02304bc7512f4786e3ec5ca401f8bc365516c3db0c3bc43203e61cebe733858888cbd1f0058ebcc142adbe3c499ea8629323546457cb2ee7d25c184fbb3e5647a6811c8b3e6e2be925ead2ffc6cdd5396a93029e025d7eb8cc0ed92dad6f4c4b1b6fbec988571d01f5325cc202603ab5caba2344a8f73121e0cfa9c17c0c169f62fb309291f913f337c589ca5d4daf7ef265bbd79d12a20e07aa215fed8df75d800d780ffc6f984cb2e570aa66498eec6a8b093411630979e4d2e5cd267e6f4efa58e5f5b6938a4ca131c471b694eb360a221c605b1fc7d86a82952d3ab03d95031a45f881bbc80f06d1dea32710bf09b9ffacf670fec163988a8dbd5c78e1204eeb47651c4e58f78a239e7301cb07a0c197605410bf1b052cc948d19653a946f2800a6e58a067a35920699e68b2b667cc937993ae3d630085ba94bcef0e559d98ee853731068bd87c095b766a5a7b0cf7d6bd39d6a79e89d8f55d8cb08b01979a4817d5af405753e8e9249bb811d0afe4273033bc85dcc2c4bf9ccf90fd103294982c902e3c383ed31432215ad31b5fe07abfe5d8b8a660fc0c5906f21198d3b9b811e198ce112fb709b6bf7791cdfa468bc34aed6e847a0fc60766190460a8e6ac54dff4958011ac6bfd7bf6bfdc33eec5331b88916f08cd7dd50f465e42215961fa806aa439b87c636ff937cb2e6f4d036b7c8d419efeaae5456242c622a4638e902192fda33c440d8dda0ec4ae93af72dce2884f9b0fc818a78fde8a6d07fa7e0093c41a2c084a920ce7c4e4115ce92439a71915c08d04738c35c92e13c4a4fb57c18101a0d4fc800f0b0a248125670b20b5214f73917ba3bab1a1f1d67d4d339e0e691e3e726625996f757cc156be5e13141163826b49f5d50bdf4d9a596bb62745c269471f6072fbe65e396fa365f8fc845b4d240a9aeb63fd6fa12ab1e0fd05ee540491dc5bcefc1fc4c75b0366bc46243f0776dd3e8027542ec3dcd95a05454ba20a6079ac6c8819d643ba3b5309f10dd725c770d1462d79880d4f4f3f2f57c514ac9496de5748225cf5cd3497053987931cb23152adc35d873f18be6cc50357ec258e51e42b8fbbabc1a28d3f7cc2a1849c77880f29caa4587e13a8ec85788376129c3c319c8a5e24c9b0773a16de5bb38be2969adfbf37a8028924773f2cbb62e124c75ee326b3567837ddc451e1c8b05275395f8a4d6b83eb54d010ae704d038a370dc51a16bc7ca9a29814ad87aebd74135b4bbef73fb32eb3b3e6b8cc7bb47916db8ba3843dce95a566fce559c5732579dcda053fdae4a3c6f891eb203d8ae7259013e661de47dabe12889e36dc95e28e6ac1b95bbcaac65ee54b7e91f564965b5fc6632bef660f63b1009a38f1d18ecd3539492c1906f6865ec38f9a1f3a0f649a951377785c936d2fc9ef15308dbb0d9093375e3a43f5c986fca8e6c255a44c693b196d3aef45295f3a301ff0c6709f9e17eb91a8c00c6a8929844cb73714be13e40ca5c16424a2026e2f910124d52b69ba16abc0bc66eb09e9249748ffed446efa0ad8f6f2d671d637d2bbb9fcf1efcfac94456979791cab6c547384b6538eb936dd644c20328ef442ed80f89c9e9a0f167c99f546e9b19d0615415bb8e97c6dfd58a6bc23d48bace8106fe58175061b5f07e94d6be25a4a5244b6eac713fa765cd9103a0f75d92e03c2a0517d0e889143ce9803c02e580b5626ac9feac454f4906b18c1296c2d0d435e92a368e6f195459b6efb2d0058df7727dffb6b189e3db8252f8a5266a9ec47b003f56c5e19ff85f900d72987454e2ccfdd69452f87696f9f4712f19ef9203342cd25d9cc817af9626d9bfecc2e83548f8bb1fb36fa4c0a900567fc8a37cd831a8213ed0e858f6a01862dcea373d121c5f4a5adb97bf9b0f248f8c679ab721f22bc3a65a21ea70cbd6850361b84ee9b3b3a97df8b53c3fb3c5fe12b1bf65c183ef608213deb9de30181bcba33db7b79cb61071b38dadb9ba0cabebf0760eba3e470f4e7ab70173dbc905ecead08f2ec04dbc2c6b64c54fe
75bb4fb593c02e9c440d773d4c5d49f09a3494256445e9dc740fa3ebb40c4dab6a8b14ad62b66cb8edd56a040e1fa0b387aadbffed2003c60fda472c7fc27d48d9c93d80c26745c4d0ed5320e4f57832e800143e196f3bd777ed9edc81fe2dd5f7c758b1d7a9ddec539c634bd38a5912bb4be1e9507434aed53831bc352c47de1541d39b628d2f2d9d815d30c86c238526b90c5e72d90b5d2d39cc2341a91a92d189e2b13bc33dc036f68260bb4a1f210f2c8cee8ff0982b05f501eea21b59b079f857a55f6acf1eab2bf1187f296bbf8cd51ffd95a52fd054e63d1cdf41cfaa98c2e5e15c3639ab1acc85ec369284e237717715aa114a004b3d60d22c0a7a1206b39325308330c597bec8c4b68"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000109a80)={0x0, 0x0, "dfaf4f68fea4c9511572261348d4aaba190898142ad804a8c3b50f05a8198204390408432c74e67594a14779c6d6ce4b3385076c32c35979fe4bead70f91c44c036006698b3938d8fb149e684296ca238cfb764eb27c5e18cceb08720602ddd36e534fe441c4177d5e8b6b4dc93cca506139fd5117fe6fe358557aa5a370721331520cbc8dc0be9cc05a9a0f9e39026a4b550425c069bed9f46ba074ddfb6d4ee21b40d79adac895f2665c687e94b8b017d2ba84bba505ee6ffcb736eeff33a75f1eec2c732a018e8531f1fb4b7394d092181c8ab7011601b4891e403542df32898e366aaf37ff8fb58b6b7b43948ef922255cecc6a49cbe66e0e614a91370b3", "c55a444fb96f684ba467c9ac37040d1612bc298ce98aeab7dcb9c227d1e2514e7852bacc77ba4c848476ff479a00bb22cebe257c02b9bffe2d437a5a741a8cdfc0c1d2a3b53ed71a436000e2814f481b40a84c50b97c89aa00a54de92a0a63dfeb18bb3152340d80aca5649b2bf1afb9d23884ba39862cd28f7c34553abfbe3bbade9381dcd5ed0005bea35f9ed367d1409d23d210ecddcd1904108e810bd0d8a79376149889cffb8c2774e7d51eb0c89189fa520fad5c876020d2d52e1cd057db5a1fe9da903c5b88c95b9e5bb4bebbc225f4870f945c71e1e92dcfba07c91bd777841175baae43e14933635fe4b79361445fee5218d82d642d21de7d84b83cc115f888c34c1db6d86fea20e3af4180dc38810da65a072bf32d0ab0e396c83a8f138237315bae59287c27b4a587ad61fe166beaef62214a6bb40d47ed77241de3d0b1f6106f0c9d0c47c8f7882efd99dae66bb68eae19645c2db07dc1309eb119e4719e662ca39df3d44f080332a37809c31d63f408441328c52869d430f9f2e5461ff94fdbcda698664b8088c0c62a6e7e380c7f27bfc5109682ef63ffdad05c79d6fbce248b4510afcf331a7d821f8a24f28fe1849242028e054856286e940955a078b19b18aa519dea23376719216fd9c903412f0ae1fef93b6ed0adc17a4882e49047692f6c0f1bec981b100beedb41365d05e05a2221dcb782467b248f2d3c0cf5e0b87540acf9e6d1f5503e3df7671be7f7bf0454a4d7a29280019c6ef3073a0f92d84b037767c2da43fcb353f1912a83ecba7aee8e6b5b593e34be17113615fb9f7ace9c8e0042066a12d410e4876d705df055204720b87c8f5facc3f31f362426f7f9becc25d9e781cf47b16e42715b2850e37f57b5fc20ea55811bcac390941e408cc989270dc2699052c7b10ac321b30ed9fbea12c0c09a14a087ec4021ed4b574103251078043eac1a1a78a9c98f7757ce8beb5d99fa3d7d5c6d38465545c4ef043ee7672a303906e241cd8d0fd9fede6ea82686f224e2f5a3d4ff025ba5d82e300f9fa159da41387f01186b55f47d243648711fa96ea4ebe8b2825255cee7d5dce64996c48ca1f643b518288f1ca23d40d5607fcf14eec5a819971650f677331b8c1dff76fd473ebd5e94e78f49a4ef492deca4377de20c178bca76a29f3e9afd5b37c1cfbb3df6c8205b7124671478a833cab0dec08a224e9526dc1030bc7aa62e9031409b2bd1e089fc742cdc7f2e23edebd7d6bd7e4a09d6c4f07aa2b6d7a90cfb7e6aa1a898b77f3236ae7ff73f8bf77d976ffa3a8dae35986201d390c0ace643b30a49ef98ae7b6e57f7b09c64bc9e30d68e3a35dd5d33ffea472fbb1bf5c2603a669d2ce1f6663155a9494e08022d0410d0ff6033f9c887ec875fb4cc84060e72067be5a9201153380fdbcd97f441a62291bcd91b09dc7aee662319086a47f84052c3eb260b73d775e136527918de754ede1bb4108b4c0bb8a041b8bbe792734ba2067c07e717f36119922682151a86596e47ab08b906e17878d7b2d35b5848d93c528deefbf3cd01d4a87f98bc1f38ee4415366285f55d686605208ab9552cb082d311c96045fd42c407f0c1cf4b0632c4c09dc73fd3913684beb5ca319a54adb119ba329dfa67ef74665c30c5377857d7a8d567afc8b648324dfac5dc1af7c09dd90d0e9e92bd
05d25ec4e6e1f306fb2f99a89b5ba19aa47c1ea7952288c30abd10097c8fcb767045707f9e9a4c775c5b50f3aefdc02ac1e9c88adad18c5511d2398fb3fbabd34b0e3b6e8dc76f19cf32a19086300c5175265b320f9a19521f27d068f3368185447ede7be89355a5923a9041479443b92cbaa52557b70046a07f1821965158da1a919f1f26630e19657085fc1fdce71a4e07f1c15792b5bcdf33af802a09d271366df5fbe91ccc78f247e03e75a17d4622cfbb7d5b7f06b5fc7be60b79189d1eed840842615f1ad4fbc57099b9e2f85c4e383c3b0b4dec4b7c300bff5b26d3bb207f28aefef81e3e8b2b9326d1bd68710a8eb2005a8cfdf94712159e9d880b39dab046c6a3f8a6cad68128fc5ca4c46fcd358fd96b14712547975b6b89255815c781fb4cca830e5f9ac83c5a135354bcf013fc4729ab96cae1fd03e9aa2e2ff7d841e10815d24f23c40c0dc73de9930ed41e9efb1a730b22231821393e019f644925c2bfeeadcbf0e0595caec626356ccd7339fa70aaf4900b30677137aec598637508ae2884bc10169fdf4b1b5ed844617a466ba42c71801fe1552e1c302f1f2929d489b59838d84d9b0ccb890684c1d85a87fe6630150dcbb5320749e878f327592234953af55493172ff697c62356bf2e3cc429f4d3657702417880d1d55a75e15ebe5025d346b9c4dc13b961074c34f732fc095741f77bdeb88121c58a6945b20e4bbd79c4eb3d5c49782493e5739fe0d0d080f825574ca992f15ca2a10733d52550a8b61aa4e9083522aa5cb68e9a8636c67e65d7bc11b4826ef70ebb433156702c0c6123eda28ad36b9973e47729263ba7e430ca0fc19d1a1b65cfa698e132e384dd0e7a0652bdc1bd95df9c43e067cd57e27771d6adf163d93d929da6b23df3d894bc2250eb5c23166ef8d887822bffe37fb5d9cde2502e9247c63124529ce8d332134541b3c172dc5f2982a545855be424cfeb57ff1e0400010b8cc83fb7d56f92bcd6a8485bf77a9b8ca1a4adcd7f7aec958195d3ab39c647efa03de85e4d6228370d38d5fcfe615467ec246167a2b55106e2b37a6844071d8bd23eba2af31b19d6d0333e5c1c2e239f0bad38002f6acbc32ec0662368c390c1b2d2f8a82232309acd201cbd424cd455feef3f6f10319e940197d4f40790487e18d60dc33435be9ca463e7a5da542c607fc5d1e357cab90a5a6a00b82871f3a4acbf2770ecdfa08bfaa090ea9976c02a3f74ce700b4e994e33c19ead284ae2edf563c4dbb498dbf1d60b9fea4b762c914e8ebaae851d842b86034eef45cd449eefb0c8643315b6cfd99ddaef176c7bdad40643f19802473d356959d4fab0ddff8dcbcb07444991efbfccb15ccccb273777593c78b1451a864ce1d8afcc2e2861250419b67ca8fea8a59735b3f75424203401b82ec9f18f9ab82d1a2b2b1a918487164d36f1584e8ade304f8633ce0da14cab984128594c2fe49be9706466169b9233075b34c5a4c018c2d8961aff89b3187200346d4c26030044225d35c31daaaf74d09e2e615772dbba5cb108c0f150cb4ccd40c03e8d00931f41a027fb19b7c9c477df2f15447df598c749304c05d8184ca0f8416dd7c5e2474a23dcf61961317249f3c1b304b3f21dc9d3cfd3a7987b90e9fba232fdb3ade8bf21584385e7d480e42adb927ea718298e225ade40291519d7040b184581de9a3bf40b7c3c2273b53926767510c24487271b9290907a426d32985f27c002c66dbd15350a5dc4de29972f5146013cd7373dfbc63b2701e9a830199afc5dd87588e863453e1b5644879ea002fc04d5db6bee8c078b44142acc9ba91a152839e2de4e538635d99a6a8b636db1bb1522d72cf6e0eb00f3b83f400187d5205b2556ec6d125c61ef29f100d14e979584343cd64121be8a2a0acf11dc73537f6739255523a1b8254cc48f8576c05bda155d34d17c5ece4a9a256402c4a749fad8733e5cbc2f0cc00eb8ab10f5c22b7207a5e6fe048d6fb14b185170e25a246048064ac711d1641851c06e89531d3061cea216948d9a7684a304aca6643cf5949cd81218e1b6934d82ede93d89c4e3aba340226f85d46bcba67b97412645740fc34384e3344f66747d86dd0749e6313a18970e6558088b619bac5298d81c4cd5718840252782c80bd916b2b10c632e51cb36765078ffd0c119ef5c877836a2493a3a3e3540e93de58ad1dff5d4e0f3ac05e805933a1b1ead6c51f4a60415b2cb416203afdcc30f146118bd59f77d2fd4c900bd8b9f64668181232ba92e9b3f272f3d6157016c3b500fb1201c48cdcca109466974dc576fc5c7987027c69f1aa809f9f81d42e1c4e5e52d586c5ef96deebaf13516f3d62f9573262aac45630f517608e9085c3a508d5bfb8c220f10bb7bd224c1a678199256691c0c3cc9145cbd6162c25834b495903b6dc82486935139129270d961110399269b398936ca31bb7085876d9261cb842dd8eee134e7f979c3b6614db704230e0df916c702e
c192feec680cd87b04c1048702c37c2b3afca7d8ab2cf4781600934ef2f89b6177c74d2a9d1b144847c0ce48ca596adefabb20c7fc26550d279ce5fcd3f128e3307b4aecb56c3a86f257c724b7b8bd6565bb408189a41aaa2ff36154ac1ee7291798dac3e89a5efe5a381d8c5aac949309744fc0aa84fe409f88064397ebbc662e3f5dbeb24b8c8628c48603124d395785a30cae026effe2ee1b79d256d59f4331ac81411dda3294f371ebf6427fcfbddf89486b3de2b291bc47b972a0c6995629023b4f879e447303fde1bf8a283fd25ee711b95343eb603ff30b5a06f8dccd08d2aad50ef2c096ed11251d2ac2f4c613c396056848cfa0212179c7f49d03b5947417f0e7e721f1b1d75ce7c836ae5334de7d1055b0d67fd35f015f5169074b7484ba38fa39385dbcfe3f61e2cb94d99c31fb4f4195686b05023887bcdcbdb22ff6ff69062f7b5b747f6543cd589f9bf6e204bbc31aecc34d1c128c4e353a8232bcd3d493ec1c373add84dc7fccd51a1e244c4228f02a5bfa7bc71354d2300eb7ad27f3a3fe93821ee8251442e529bb90ee8dde17f5ed3e710f99a54d828e848449c92ce9002b30e4243d05b7e64d62308e3c9d29c7d088fd1b942ea0e55973a0428d1e70adf98656cd32db7475437620499691e6d553cb7f606778b20692218cdf76a14b3771f7be8e55e0e0dcd863abe481be367e203390345397c7ff1925cb3b3414a5fe1c64b8f6e1229a158e0f7d41a6641b5f78cb2728692dab94d7e41f0af49935d90c2fd8e715dde1a9c23c5f15ade136905a58975f57e30a763847e5533a43b61acb851347dfb765d02b3d49cc22657de1afcd8c554ef6b04d87512ee5ce6cb8ab85f1845193155fca50757eea958ef188642237ce2c87d50b2e5ce16b5ee95d4d38622cc8c4816cbfb13ab9e865141a090bcb1f04d509157bc86a36dbdb433a428cef964824415e0ae7e44fd4db52d0f3aa5b245578d9149f1f650b1b42253788c63ab11a97fecb457862f7f00337937eee890e6d10dab730db71a57e0858ed635b8c90057389b5fb5e17c4998d89103b2a50132860acb670101a103d359f98790323a57c2aa60787ace8bf039661f7589fc8ac627981f9237bb701a8913e026fd28cb918d7f345eb6726cb20c447dd21493c22e4312cc6be76cb1f09faacd407273779cc6c82a5f"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f000010aa80)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(0xffffffffffffffff, 0xd000943d, &(0x7f000010ac80)={0x41c, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}], 0x7, "af871e63be11c1"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f000010cc80)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f000010ce80)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f000010d080)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(0xffffffffffffffff, 0xd000943d, &(0x7f000010d280)={0x3, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, 
{}, {r0}, {0x0, r1}, {r2}, {r3, r4}, {r5, r6}, {r7}, {r8, r9}, {r10, r11}, {r12, r13}, {r14}, {r15, r16}, {r17, r18}, {r19, r20}, {0x0, r21}, {r22}, {0x0, r23}, {0x0, r24}, {0x0, r25}], 0x8, "8cf69e6f99b7bb"}) r26 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r26, 0x0, &(0x7f0000001440)) 00:20:21 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x5c000000, &(0x7f0000001440)) 00:20:21 executing program 4: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f0000000080)={&(0x7f0000000040)=[0x0], 0x1}) 00:20:21 executing program 3: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0, 0x0, 0x0], 0x3, 0xf235955baba59ed3, 0x0, 0xffffffffffffffff}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(0xffffffffffffffff, 0xc00464c9, &(0x7f0000001240)={r1}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r2, 0x40182103, &(0x7f0000000040)={0x0, 0x2, r0, 0x3f}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r2, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0], 0x4, 0x80000, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r2, 0xc00464c9, &(0x7f0000000100)={r3}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000000)={r1}) [ 1221.614549][ T6190] Bluetooth: hci1: command 0x0409 tx timeout 00:20:21 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc}) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:21 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0x5c00, 0x0) 00:20:21 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) (async) r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f0000000080)={&(0x7f0000000040)=[0x0], 0x1}) 00:20:21 executing program 1: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0, 0x0, 0x0], 0x3, 0xf235955baba59ed3, 0x0, 0xffffffffffffffff}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(0xffffffffffffffff, 0xc00464c9, &(0x7f0000001240)={r1}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r2, 0x40182103, &(0x7f0000000040)={0x0, 0x2, r0, 0x3f}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r2, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0], 0x4, 0x80000, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r2, 0xc00464c9, &(0x7f0000000100)={r3}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000000)={r1}) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0, 0x0, 0x0], 0x3, 0xf235955baba59ed3}) (async) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(0xffffffffffffffff, 0xc00464c9, &(0x7f0000001240)={r1}) (async) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r2, 0x40182103, &(0x7f0000000040)={0x0, 0x2, r0, 0x3f}) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r2, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0], 0x4, 0x80000}) (async) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r2, 0xc00464c9, &(0x7f0000000100)={r3}) (async) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000000)={r1}) (async) [ 1221.779817][ T3198] netdevsim netdevsim0 
netdevsim0: renamed from eth0 00:20:21 executing program 3: ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f00000fd880)={0x0, ""/256, 0x0}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f00000fda80)={0x0, 0x0, "da7048e1ed776d7c59a96c244735425e9920c8d2bbaec41a1cef0b8db1c850512e0b14f7cb58815c11ed4de3d749192fcab22ccc00e907a86620dabdeb1e35dd59823a50cb8db38bf00d58570ae08f832df34ae6638685d17aa6f1135d06b11d88a2382ee99bb226ea6d3abf8db57b7f4de829f7786e3944a04c0f27f63b09530d833f24a9558fe4cc8fb82387f5a4ae22ef24e7d4793b88f424d993ea7a394dcfa835cfb21fe38b4126f3d39590f1aec6b6cf213775b998579149945a92aac8e2ccc0c2bf612d0f7e1e892c0a563306a543fab92a1bb551ff060528b395cae288b242e6f8b69d6db2cbd7d2e1ceef3819a2103d6dc744695122f67802b124cd", "94463879b95679de8c4951fd9f987b92be4c991d388e6eb1c8e425a1ec338c3f774d728094bc8fa34814622ce61eb329d857f6fb9470be61514d9d0d0aa037f09c1d5dd27f916fab3b45236e7edd3ba15c7dbf208f7ec027efbffd1d08c6fba34e4ffe87c19dc8d32000396da6a959e3d4ccd8f0a56efb3039c46337aed5b0ed7a0eb3ba3991957f8df7855fa802c7ca14a7cef03803ca4af57c366d6f0b326794197f8447328ea32ce8b72da38134a1b3dcc911c1a4761ac343b60590036c6e10998a60f1eb8eee7a275a27a9031d88141413499a97aea1c7639408afaf99801bb081edc07eaffa3206c5ae260f4264f8e72dcca66b8c247084e7807340e59a7feb6f9de7dfe4fc223b5cc927a5cc7509038c075e1dffe9823eb68139f32f6550b2c479c668689e2979790b11971338fa330c130547ccd32a93ef739633913522734c6d308950bf90a081aabb9c486b1512dc6444f7aa89fdaa00c048ee261d8fd3ea3b7938eae56e24015cb7288ecd5ffd1418a4b0a95684e9508a9bb1bd51af2d4e350293bcdf0783de920261027ded64165c215a6c536bab2d63b92a3d1796a25b5ae0465c2255d50cba9b625618ca1b9e51feee23a1013299d5b545b9638a4f8feffb2fdf36a879bf3553792c64e67fd3cf003efbb5e4b630ae342d42423269a4f8152950d228bd08432a9ed4805495ec0ae2e55743e4ac40127dcf45121617cab90ecc5f60ba017695462a1007d2f4a507f579287856b5e625abf4668f356b319b867ca72bf4d871bd267deee98f6c681679cd6e9052be435b94c3d9f931b0d17ac5d11e9fb5f982278c3943cfa3eab7b6bf5280c9c241c54e301b74ddb14d8b96bbbed637d34ea89f59c481e46928f123cae8eab3c994c6ed8f4468061bd27da9d53354f26be61537a94eda96a8a629e2a5c0f40d2ca491e83239519e9a4b3696ed9106a7998e19ab40f5cc999455c2ebd8a16e4ce252f314809606a468c9a360efc7c143a13aa03f7e0e298e2f633f345df76019fa0bb06e0b0366a7729a3a0d6196be89ad6a15d9b1fbf857ea005adcfbe074454e5f2b774e5c066899d419333c7089842c9cdae493b170f8efdf298c5d85e722a470070698b39821a15d5496f38467a3a0b1dc9028c97c7ba348b4cb32351d6e8e88e1216cbffd09991159299428c7a097176c696bd627d2e15f7c77e652f58f249c79f66872d3c3ba12889e95a34ea3041f0becf4fd917de03e55eab4baa758758ad0e1928829d2972ea1b78c3287132c42a0d784a37d623a28e582f0a2077f855a5aac6b3a84fd09f413596cf68085aeae4f4c857ba566b076b07b8d981a0352994e629a67125b6d7f913c24c386dc70e3afab3c0f6466a27de24711981d452df2c43cf13b5fd8bb7e9842c819d6679cd89db3cf0d81540b89f65c67cdea2935dacdce03ee138ef9455a3a235703c91a5a1ac7daa575d083382518225f128ab7c5718f294c15fa487ecf25bb3dbb8f9c64a65a7fc5187fa1752c55d5ee394b59d15f10bd6b0caee44840caf48207462c96cb0cc04040529e851b3b0eec6cac4dd829e938b0e93eeaab188a4445b3b0909a32c020797aa5b75f197e18be1ca558eb44b7e05b8cc6e811d9db675be59c31e648f65e8071b93eff451b85fac3d9d16f9e349a5e9cae120ba2ab530e42bc780882e6e5af0927e7c0abd2abecbf42c773fd8068f95857b43c0d6b3963cfee52fa39a7271b4b7c835ea25b240e764f7037d03eefa41680381f26b77a658a892c43296e8bcbd1077ee5613d02bd27030c2dc1d0ed21ab1ba0421fe4ed6d7583b74f860ec4d088a7e4bc3033db91a3e51c0e6e9a5de049de9a269814e0e3a3b4c45705ef9e53da445d87b32d1c1b56b59b4dd2b25ee66c8b1adc828dec9377e7b66faa100cb5c2f7af3e253caa68c855a6570e1a69c6a9d926b8b957ac9870984827bcb
c9ba9a9d81e133776102c68572197a1ca74b99b1c020cf60c027f62bc56347b6c805cd6300f9cdc0c1085ca74241208b4b65c8a0bca9829da444d9b5bf03dd9090bc2317e0d8f8389fd499d6659f2feea59b972c439705f4175fa8fc461d25ff17dfa7639950927c125e0bb447922dbcc3051ede2aa25162e8eab21a291110f55e16c0650a81ea1175a81e410e387ef94a90fc9dafc913ecd338242d6b25c7a474c92885c9250bdac7c0871c71d06b12175d62bf52676f854f6616e14c8cbb9aba9c246ec5c7c6ea9adfe29d0a68993f62afe2b96c3f6ca7c47f383f38e3900a2bfe03a77141038ea5bd6bcacc2833eadff1f44bb3c09794894ae31e3b0c01ee8bfdf972d124cd1dbb6be22275eb9b7e609b0a4f157ed39f3b3cbbb05bcc8f2208f8c393c912f62cbfb3ccabb8db64539d88d0abb785067d18f17ccf60bf01fc8f5f6ae2890e5d438c6b0fae834192611baf4011b29251ac7ceb1f92e018bb21f64d35f2a2453ee77ff929fe7afa5893f4d0ac15e4a6fe48e1c0b7ced2bc1193a4da20a2ce2f0409c285284f2123045aab47e1b195b4bc9f5259c3e5d18666dd543b237b19ba864eabaf2c50949afdce340a5ea3c999fbbf6104bf6c4c156aed93cc152af54c9d11c626cb1df4c990aa950bf427f0b8233e80f38875613d0f71ec2fc06aa7b2433cc396ff78f205009edc46bfbdbba33f42558d05974fb4891027e87c728786d3d55da38944df11e5b7cc809bd3154aa61b1bf5a73e77f63a95639e4f502790a1b550f098157528f7a5107fc18531814bd4e5f7223802a37c751ee9a84b9fa9d8c123a576d734d190a888e9fd0d5e14a3d80d037484210f0d6c2bb64cc1f2f619c2af337091bc5a25627d8fff061361a60f0f7624ef51c2b120a9c233ad0fb277cf513e67fa1003376d096d7add4c40bca84e9c3ad7530eb39506ce6fd28a8dfd4c73fc302974f1651c78fb6777121fbe9dae44ab726dba9a159ddb6de8046c061e1d7cb8f70e79c23df2a2aa3fdd46bf760c8830f43f05befec15570feb9dad9a8e6af3486bdd0365a02e8bd7023cbaf1674f7b45a9ec9b9756804cc78d2b77e0702dca98777bad6907d8a97dee9f70c133b6484c5c19abca043e0ff519adc2540eafe82a19701259d354657ab8f84387624992fc0f905331393141742c6450fe19251c1fe60469e5c6713356c1c373e621c06b3a1d336c01c96725cce321b226f43ff35ea7fcc7d7df725f55230d68554ff5af3cc5e3b8c3e6b3123c819199e65b2dc11ddcac6164ea98a7dfb22162f80b0ce0dd8b911ffb6cc645003eba98449035e3ace18f56fcf4456e01264ec2a7a37838f104230135b59beee09bc40f73e96c833266cc64e9ee639e1529ef2a4e7eed54b9bb3101d67081345686ed150720b24d7621733505a46a30706b3667a2bf2aa85060d14bd8a953653c72aba0a4ac5a14848f59c87ed7fb6a89612de76ef0bc248e59cd2986a6c82b58d2b7708c249f682899f623541ac4818e009ce1ab6346363f3dea68311ccf5a0ff6dc2b10c0d240221d609a40f6a52d1d2e60597657e97e901a2e966b634d395cce2e00c9c05954b92b3c8ee8cad22556208d8263e54eb2dade0aa51d10c36761cf2e29be645976078f545d0acae2cb6554964ee746ce5142481473b353851d61d950ae7432627a532ba49ba8890b971ae289f81814c77e26bcd26519b858118d3188eaec9ada8154b7a679280eb48b5017b624006a4bba794e91df6f85129e6f10506c2062656df5b9eea8d16d482f081a92b79af0e460c2a60cad86728a6759184a0a2ee1832d409f772212685fcad65d6e947a88f6113b6038fa215b8ed9eacf218fc945bc4c9476f5f108b0d06c292d3daed155a8aeab18f4e60f6a71d46c2cad366f15578fa21c414f0960a7a65d4bb0ce935f3b17ea0eb8f7f0c1426cbb48a541c9a32182f60bc8989d440dca27fb3ef0348009a5b1d3d7069327dac73f04e3e29113ce78cc4af1647971753e1f5913b58c07f7568697e286973b0c56f8caa594670c62ed3747b1f93a2586dbde254d36cf359e9c978697a66510afd77a3c94afec580726252284aa458b17ae359b9add39e14a1747376f6a5fdc20f00de1b09d802d2c699725b0f6c9072df150917349772c4242f887d8e91498b2496ec3b3d648cfd4904251fd09a1cbe08c8ea7dbf1ada71a743163a1add8ae5640669b6de27de60c287807c422e985faa303dafa700db69a7507c677cc628ae427f2174245ce84a21ed01e4ef98a7a9be23fa2934ff699bea8f0736719f85cc5e41908eb557aee74c0a743d636cfeb2bcbcde830143186c7b0dbc3354e3f666f659cde6ccb14d3e8313ea7f62b2157a969c8dbe58e46d74a0db6b6b3228049cc8534fb8524765e481974e732237c3c68298559231bec518bf20865dd1d5d875ee2a90d2616f7288a58c8d3158070373506f808bff8943b0ae2042fc8a1ba09f2787d64c45941935852a79b95793442a8
8645a33098e34e7e6fb2ab9964494e248ecba13fe87bd9a6956d9c2281731f2fcd2cf7bbcceb104dfa4d15b716c1f9dd6ebd1792c78c88286e2bd8dccabb8c39a6571cbf6d4dc9a1867709e633a8fb236ab453461ad27224902ff66e483aabb9ed4284cfdd36b2e464c4dccbd56b14e2f4fa8aadc13179c6fe4867967a8258279c41889c66de70ffaf20e03df8925ec5169aef77f95aeed25b26d1b53eff10d29993da8d3d20bc82934a338d0696932b1595751254a39d66ec29f706bc8327f029d0e5c91983b17632282cdf22e4bfa923712a9aedbf0dd381fed0fe7b616a5285dee8fe5a9600ce3b2b9fdaa9a84aff740adbea500c63b4d72cf6934213304712d2911a516038e9dfe989212cb54099a151eb3897a5453bb5b663a5d389cf2a02b319d70e1d1a763e1c0542889123f7aa0ac1a7a824f337721dd1bffca1d8a7130958f4cca9d7a070a59b423ca7dc7d468808b93f9b793239ac2800b03ad5ed74fb289cbe4615b41c1707e082e0edf428b1f55a17ac0a4327b6c2e56da14ca0719cd67c5a694bedf861ea156f28d8988d37af6b1a4deb9978b7e502267d8c02c8f1945f84b0f464da219e9e17c8e437c5b0d61f5bc1c3db53b2992c188031756bb829c508ee614557609111ff19480b0a31bc3b4407f8711d75ff63ae0e108a770ca13fcc7386bdae45331f3ee1d1d0ef644ab4122e7f320de322cbfc0051b3c850ef69302856706b8ddf18148be0b67c0e57b590afa8525dc61bad9e681e691efa740724e93038431b9381025be8b2d410d930cf36a8723e123286b3bfc729efb3caba3ce32fd53ca2e5737a0694512577ceb1365f0229470b3a14767e4f63c3eab8a09c8e19e8e4e4458c49ff5f64b9153a101897edaa22b93a8854a210bb211f6dc14628fa04903c6ed8a7667eba5e882e"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f00000fea80)={0x0, 0x0, "1cb8e3ee7111829d19aa22a1f7b71c38ae31f7d48493d090e6d8c282d27a497fb3550ea628a02f7e6f982c622570f566eaf782b55f8ebff70c935740e54d92ce2319b507a792fdb2f7f50dd4272566849b99ba46e2e480fa711f4eb46a64caf288d5cfb0137d407128628f2c67493e62440dce8f439a63949c9b3c7b098762d870394225db7a8aa53e925d1242db499461c631bcb236141a0fe26dae6e91bd0e34c7cf04e39edeeddb2efdbf3c34ca9d1ac69f98060132362887ab4da723b7921f126f49e113d02d67cab717f0b4af225045e2c9cf9f4889191619296117d2ed482e320d2cc727fbc21a7799bac5e2c615e6395f6d2161fb6b7a800e92c4b67c", 
"830260b4127a5cad79a908f86337b73116271d3a824e537f1015f2dfb9f6f546acf09c9bb012c7c5b7aa836fa4975f5f51cdde6215eb10e4cdd3c24671c1544370adc178d466356cf0ce1cfcfde870044e7f5e77ffd975820f9d19bac5094b8d1e2004821e113a0386791a3ae94e4998ab04c6166d89a89304972072046b326e14e6a32c7593b5e448853b439d1c7831b4435079ed544ca2e7894e7d8860d54def6bf86a3458f1b33e9ed2db712162460211c7c0aae9db80b1878c2334ce431c0f0394e14a4ab36761c9b57b6c23cfbac2a7a82f7ecbd0e82035eb28b6473a761a0dea32c98c75feccfe72f102bc7fd7a54c3ef9aeaf82e1a1886e8f9c02f9150e9b0f07d475710fd1330a9268b3594a0aca0d9bc989f8ab113d93c09900c5d11afa0c63c6cb4db3e3ca2b2df0d18ee3f1cea9d62404b5d781d8caf031eabf5a64e2094c3bbd528f9f2b5cf0e47aa4e6fd897eff2da56297ebe0791419b6cc5f766001fc90aafc167ac6b95168f7001f12b2f19468f6d3bcd500c39aed3f5cefc04b33448605b60bb9758c865dfbede8cea2a59a3808eaf6c995180a66b7d17fd946410371e36121c90df0d318211b1b2fd3ebf2dc53a6a51da268c46346f113597f5238e80c7808e8fac6ef67419dd360aa66287de6862f70e82c8eeca665f4963d314d6d1028e22afc7e7f8a7d79f62851210442acf9236c5dbb8025c98c84ba442a2bd4dcad411ed5530caa62a1911c10cd452f5c20beb7f4ea9cb27a23a32b1d9b3b029340109cc5ecb511dfe38ea0b943957ddde47714fb9c19761f049b0fb8f6a5983e7c4e9246ba900618231fc7864305b7b63d602163e2d8eb2c5a27e85d3e91798953dc40f4d9dd4bd1e7e287939a834ae4290d2c6ca573485e3eac0fae4da1a730bc8b67597067324d9faebd9fae493097ea83df7bbbfe47b13e5d96901b7a016d75dbc5e69216f2d7bc20ff38be41dde76670bee2c7b6dc63af393c33560a0416562d1f38dc5213addf9c171f96bfc2919f7d9f096dca5b9ab0b1ba03d6dbbb33cbceb4ac2ccaaa64d81a38e6b61fa1bab2106c787906e05fca686220bb565d335f38c23f382ca187108e968204d514b05d5ec9abf987daee7fb195b1f21890e3c382886c7057bf8135e54ad44c1907d37b614688eea3fc4b55fd9c8aec0d21f2dbb07587ed9fd5f5f6aae84eb2a4541a201dd600d65693002d31bad78e187b28c60e942cf54a8c457664f2f6f9071a72356519d3b573de10a202dd93f6614f98970dd7589a9645d742307bb98471a55c083d63314aa252f2a70490c7ecea06832689ae3b6ca079ec973ce7148f4820630461bc3efaacccbf36328001354eebd9fee036783430aedd83c1ec2dc7be00d8d32c2de3d3c9916f9da0e84b641cc5789c59f32c90cac549827f545f9e0ee4a6299564cdb693817be4b26079e9280203db7c1d5c4b9228fb7bf93c4e1398a904d97dda2a049c73939a690de8baede18db84659909a61c978c1df5bfcd970ff1612336ab4fb5cfd2576cb227c21ba292fb6a55dc507782076427e4fbe8f2fb91b16480e31071369627ab1ee137a8781025c3ef22a0d81b935914a42881cbe759ce77d037fc8a7c4afe8a2828d31fc9b5b82d22a0689c652c19f43bc2029743e340fac09747110b54a7252e8715e1db022aa9b841dc251ba75116f8ad4cca4a143e448ed901457ccd4147f9a26a608d1743f4555951b467eb4a1bdc51b85803215e72a4ddcec7d8eec8a927039ce6eefbd633a869e4b0517311e5a3f969495b3b0fb7bd4af2ad87b04a7190f6fda9f7c5a81ef7bf7e3a96a3d9ce3ea1ebbc70f69739a5fe51328f6d735b72c925817ae0031f9b5b67c1749532fb289a3a87d7e810b584fc65af6fb7a2a7be331d795a3cae5c7e0967ea5c0d53faeb1d99bc374dc40221ed7cbe7d8eadea49f0b14ec621853d888475ea99b79f11033f3e88e38de6026e9ebb2e76f38e627fb34a0aca1381e70ea73d68829f9943276cea738cf16bcc85a18a98612a7bf3bcea92e4b661ae58533f6134706abf45ba09b16b338d03f819c5b745551e8a7cc4619ed48cd67527874e2daa09c42390675932dc17b07d959180a08f95620b67f51bc7cd418ca23be82e7723154d11c054cf4aa74cbf7fbc66bd1409df7be35af212c819b19da3aaab6404fa4c412418e2d9eb7fa0381bb3d2d76a3af68a06f1de4206da6a00d49aa139415c105799921e5f45159f0812c5fdc88800d87e6f317fdeec06df51bdbfe84df91203f7ef97edb8140bc3ccd60499496290021a587a0289b019ec650585e7f6a69f4e253e41bf2b2a3bdf13b952e028ce3d13c35e3fef9eee0ca1cf025bab6844d7b6030184223d5a49fc3b46c8924e5d0d4ae9fc5b7ae9888d06885bc1a8d6190c9fc54250ef95dd8e7667d62f3b63b097da9805ce36e1a2297b5940f32aba9dc2d8ae332e95463d762e4b4e555f73d8a9f5084e5bb93a78fc91cfdd9035fa2d15abbf66373a0
7a30bb0828762c32f3f7277bc7230577ab125b96759280e42d22fa13092ec084edd022dffd23273aca981c78d892dbb4d3a7896e43c610d349bc21640a5e353869a09fbc88389687ff11a4ad8f0b095ab363401653bcb3fd278e973ea64133ae9283741ef197afaeafb70d25e69709edb521bd16bdbcdc407840933ebd8872bfe36211073aeab84392925fcb180b11ad4aca52bd22344be57fa42d256ce86da2a16b5f4c28497481117a48f58aacfb74fb6a476aa87b4c1fa38b91a9ee4e76904a5bfdada9a7dbf1594968b4dbf9c41a0d4a3a00ba55eb3a15e5a4f79a918fd657a95a7616bbf81e5d914b65710a2e1ef410799b0324d0c9cec6b499e5b75b8a3bdf5ee0d9c6331b0828e3027a32321ccec697d5eeadaa23463bfdd37022c4a18d5e4a2bd6f72edb588374e2bad574d972369d8a3175980a156b345fb1de086fe2942290479cde1bdc4313421b89b0f2b53eb96adf46ba7ef9306675ca9558fe05282e38121a28843ec486567455625f302e9d4f589b4ef0fd537c206675c5a7a5af253ed1fbea52e428a834aeda6d922c8a031f51e4fd4c595398d86439f006ffa4bad3ea8211d4658d2c141884e55b630f617b2727cd541331bac7ef08877db5b7a615caf10fe349a4ac36318e761292e8224d060727455b4e2cbc99f6ec77bdcb3c7696e858781ee721dada998b7db722c64b9bd1727d6bd9f17c7eb32e03c294d42d078c49b599a274535e9c66f3130b48e49cf35495acd2253828892c8ff5b62c1438e164e52cc3e47e2f5491dfc32514df3a08a2d7b7850d5715a42ad8b1d51a5170876dd695b5bbb8446d72affeabb120694afb80ec37fe0aa68e292b237047e1e00c79ae7521ac2d99d00c031060bfa4ca93fe55a638d78f8534d9502f530d9a727fd6198819cc54b2412a994f2f1e6804e2cf330f068cb393e313d633cc468b57e86cfc5efd0127e46d94a4693fba0e909d803862fdccd2554b3377dfd72d5212d7d57c0c2056a15ff8f63d9f72ce64d1a6328823dce26abea34d5a74a3f05c38be4bb9d8ba1944c7aaa437410f885eabe1cdd81c4b0c8ec2483c2de8176eef94105383f8b4cf7b297928296e0aff7ca953abed402a4b62a1d52cb04f10b9f18c77e5aede15860032eda339da2fb6e76a231c7acad1ba7f10bf2ae8891e9c2b9a3327006903eb7f6243c21fe92a701d7640513b9c494ea0709ff7ad8f5d8b8ae79d6604a9cf39793cd8abf8f1ba46358f8a2fbfe973f51d505e719a625a8c969bf96e57d91b1b9265fa0d4c86ccde3f100b89eb84bd787d33bbb10d2e72bf6504e3fdab762349a73c43d6c815ec3626d21266662dc572cbbd3c6b9acd9877cd28b821befde8544027b7ab63a57c2786ed60c8d2d08840bc1725a09cedc4b0a060bbf917b0cb1190442c367ae478c34141360ba40c70937e7eb8b0a3dbfce372e1bcb53365485f2412a8a39f3d49929616626566b4b27bbe84b1f7dde1151fc96eb936f491ed617df118e7bf98715ebff9b3bd40fc0bd21ff71e64610516a64dcfc39b3a61ecac95189b3de20a4f6938fc0db192d95a7f6933904785b3bd5877a9c0f5ebdd119cac3dd6930bb9d841287e3c3213d6ce0761026868a0ae1ecf7e17f4705c83cef22571e23d35bf763ac79e3ed126dce77230378fd2be21a11e4ec77de11fa04076bdeba56ab0b9df9229b0ff64e9e2752def36666ff9385ccd17324bc1db88d9f20c9cc93f7cfa8c3c4261270ec8f6d577d9740108dab6b3169905c47950ec9cddf2fe41beefcb0b2cb4822fee081d3c7fa57357026a462b041a29ad326aa20d9878ec125c1ae3960a731061f51731311bb470451f82776b9ca46eebdf72860b53dbb77bee964faf7af070a1415ad1735720213f725c6388ce977e456c95fb164f99ca9b56a6708e75512e9d9bb8856d7a6edd55c926f9793ffe494f90a946a8d6a9e35bb9b58fb33901cb55ad361629743e816a86e21c72c9cda063eb10285dce394ea6a243d065053aa2336e3a2d3d4e02e49ccac7535ca1d5fac63e3a2ac866d574faaebbe991be958e18b97d68efe0e65de353fa0d45697ac616040db91cee69c8e15f60b47e915da2610aab4f7df6436a0d63a1d78722d83bd07ee7e31268ddd3824d8ea794f226a48faffd0d651a10adb04fec628b70309880db652b67e4bbe94a1f17577c51d62d9014602f15d955b458ac2ab25b64332b311ff3008571426c1ba8c5ff9ef18a0536beaffbc6c86b005717db5a004fdaac6ab8f126e3039c4c58b957d526ca0fcd8ae979068e94cd527b3f31a35401560ee4d00101e5928c1781f406708c105560aeb6455bc2675b5ada399acaaf7da8ba024a3e2377a6655669291f547b32c2815da3e81bfb7dc658b307a75976b2d5fe3e6ecf27d9d3669e227ed516bc949ee2ac581895aa319965bb7f6279410968558404978e059b1dbcf0ee0cd24f6de8b6f82adadcc17df0d231820d7e83fda85112bd8d7941a36825a84223181
14ec1160df65bce56429f6ca6406f2fc5e3baa485ae503e832205b6186008ccf4b42130fc69beda59571a225843587425948321ee6c9c3a2221751ff4d4a7b7ef9ec55a39fa16c8785fe64474f65f781538f74e7e86ab4cd8b9c7adbe47e3e787b5d20a9b72d62ea58877f34ec2ff82d940f4bd00f48c869e0681e0af9523c8aec711b703fc01321c9903d136046c710c307e413f203c50af9d1ed64870894a44a0ed49fc5c1bb17b97c8a4c9c58ca72212b007edf39ffe6b944761ae5a7835cfc8e1729af7f33ca148b1bc6748784f56bbddee72559fc16b5314f51f700f15b343d5bd02bb1ac003431d8743d30b7f9c1d0e4ba1095337abfacc977474527283bea0dcbf4debcbc7448704f06f"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f00000ffa80)={0x0}) (async, rerun: 64) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f00000ffc80)={0x0, 0x0, "c9c31ffb81b8f7d7058236e52c99bb1931668c838396ab2ed9c7da95d921de283155336892039df46b3cdd86fe7c941066e86777fcaba96e58b84d52e6019e3d126071c11366197cd3af6d84d16024a160f4bbd98cd9487f2ce1f343d7f6fc87a45f051e7afc030a629636b400ee1b03181a9e75ee24cb89991f4c5702ed82ffb417e3205e209ab27232df3af166f92d9339e7c3a1e0fdcc3726d6c5429bf9d8cf2d5f33b06db8045fab1b8ca4b7b16228c7adee2eba3bbe935bfed80840de9267689a86b1622786b25f9e6bef89254eb57153809e1ccec92558bcdf4a8e74c59f2ee2a982c586531458c9f5e06edcca54ce9172197edb6b0a57b0f5d7f4921c", "70d816e6bdbb1e2573610fe6e458ba95507d8f377bd4aed348a2ba13fdbc998b34c6a6afbd7d9bdee78c34ef346bd0c2d9a9013363d726e163354eba18336c9e80d705779e2379e94abca4dcf8952c83ca046cdb3e1057d15cf06d9b46c2d613b0ad3490371df13c2d3dedbf74e68ba37a15da75194359e4b53bab3bf4bce0c75cbdd10b523e6b29e8cad91b27872c0b32e35da374f366efdf2f7ded983414199659c4a8ab9448dddac244ce1672d2ff96371ddec435dded5aade0076000469df55fb7047f5e80a5fa3423f25a292f0d644c3d0a4a9863668c5e8e8890e3fdd302da16529d9106cb57d63ecb150d51ea49f1fb15dc0ef2463b4a802f481d68c3b0e228948fafe015b2b0f2b59560f8c46943e39682dbc969c77db54792de245109c5d002bb3a758a92958e86db41449d7ca35c2669a4c723332ead572460226add3cd30221be48f93f664d8ed982b356a8934553ad0d9eb01e54df7ea5bbb4280583fc43e0c1473cfef6ac87ccad4f5a964759626afef787164e1c16f0ca19f4367db2cee05620e0e232d0bca2dd2357c4076640630b8cbd10e969172e88f9ed1f99fcda762461ebc140531f4e8cf0eda56fa143d58d888bb5d1bfcb737578f17a933b28415b6b91ed49c9e3207b9ebc8c1a2a409e9eabd8a8e8b723a7710d4f6b84e7f07c7c779d58f358bc1bcacac074e2d7e289c468cc0df2cf169a436d849bb7aa0074872a7eeaf3fbe2580fcb57f16b1de727eaadb81df38f841dfefb7f75cc6b6948e17a355851a203fc0332a5ef2693992714f754d5210a53250923ee204229b03ac26dbc04a20b78abf2f194845c183f0d786e3ed134f96a6916a9232ebe79030fcb3212122d3bc3d65f5005a694ba78188f948fdfb909a944653983203df617e006dddf134a3d5081c18cf39d37b016a50637831631ede8f00e7ca42fc97f0463dcf4d67ab4c561ac5a0cc4f28b743bebcef5bddb2fdb9ae124a0a09bd900d961c7cab5c9a29133b2fe19f2065408546b9b3f21cd9885bda00043fb795e257d7f09c893a38104dfaad37c4032c9275ce3e36c57cdd6e0f0f030e3601a388b8277d1a2438e52c1e5896c4304bbb9956e355274852bcbfa5ce39eadc0b6a8709d45a890a8ed41e5eaae4c0b5fccb3d6b501ab480694e7da33e6f9d6347f0678312b2ddf4e2a13ec206176f46ce01c5705be2c08a80784d1549614b5a277adf0a7eeed8d20efc3e98eca62db13671154fbded835fa8fc775cda2fbb15acb769675b28d1927256d3fe4ea8903b5267d6360bba729100ec1a261b0a32c69307039d2d4ee0077ea63bfc66ba3652437751c2c11028d122b59326b1c61831147eece55fd320bf4b387b7a747307e37e8f43c6ad8594ee004e6b881fdccf24f6fb922e8ab8c7b22539d55279ac3580703df9e89880cc7d87c0c993268a4c09bda2fa49e8045ce2d9721f52e7de11d89d0bb7c6010ff5d57144d1e355b015dbd97f7e1d7c9a1e462cef4ae1bd8815b3adae1e90e3cf1803990a4648c5bbc9c77e3b4a353327949a0f9eca3f6feb0ba4a1656e901f593faed41fa298663cefa90248a1ccb8de8abf39b03672964616ca70d949417929ba6ab147f47
068cc6e251dfd37a9cdd590e8d4c32cb71d1d4a0a1805f0fbd89a9e2ee4830c064ee0d7824f7502d9e2228642eb219394e54a237b0c648b0474b432dd1abe3ce29dd14c793e2b1782bf74414a040661c09cb6d6629a50267e7e491c2eaadbd30fc0d80b49f75df0c1d70b538622b42363a438ac0908720cfc8efe30f925440854536166887d182e402667ce2418d426e458d7468b2274e6b8f6c7502aa5c72bbd7531e82b963d095de27dab527faa581d03474f363119f4c4d7209c3b1fa54b24e1c1c7cd498005e6918048424bb94840e60d8fccf410b1087369ed01677dadc9326e111d483a2d960d397d6883f03e0b3f3a7d3b649b9f9521d026dd7be96604bc8112223507e32dabf234393ecf969f6539825ae04b5897c48255fa2752666b16ba70920a7f8903beb69f80d02345021dbe37a59404e56eccb2bd16cb3846ecf630a44ae5eb6ef9e35d9641d949471988bb8272ea437ad57042c3fa468b533bf047cc731a03b771975feb8e758eca11b861136db1cd93fc7c49ac64623cda3e7dd6ff4f001c1f3a4b7ef5af9f61307f1e89b3b9ffdeeaef604f38f0e9bbfad8ba61186b0a1f6a468c2786ab632141a0308d1556aae9f3b6d3776bbb90db3277f9d26c4631d3388f7d9ee2c164e507b4c12742480b4368ae775e0dd013154c7edb32074d6ffe5aa90cc04cb67386c095975446ae2900002ea6031efebd65d383bb450646aad2b91c3f402643ccb6cb239cc234bfc04398c6ecb405cd4385086c5b2f0100963a708bbf591c6598e181c646225f2207690fa86cee55639082cbbb4f0d6739ee45d1ae4d4761e6d6f1aaed230cfb15243f3cd8239db84789cdc9bfe5f6d0907021fd48ab013ba8bd733270494a51d4790aff901c357ed001719519029fe352d2b4a7762f029c879605c1bf8891520ba38b1083ee8e4c005ddc43de405af2e1e51761aa63b1994cf74b8c978f80d3a2bb208dd88fa8463177462102884bc70ca49babdac226fd9e6fb44ed6b095db74b84ecfc77dae8818056d495b1637afcf5029939f06458226a439f9a55611cd39b256a751706cfaf28daf4435c78f7fdceb325de2ac41558e991582e6cb0a7b4750af97804ccb7b35f907ebbce5549f8e3fd63a2a57350651769bc5ee69dafbf5115732f3f59c5a83d336bac434574fa76c250f1cd1ea56e953ce8dfae5bc38dbd58ddaf38e7424ddc04d06576e452f201d075cd8da57d6e707206d2ef97841c0ebc8b9b1cb010b52fb23de593ac9dc5c54021ebe46abc65ef35683b2d73a458767c8236428dc2957bfdce2c7e02cec37f70284f3d184c02549a4f5dc89d2dded355fece1ed37abefc84a877cdf4c03fda672a225387b26cbb04167e9639ba978da389828246e8b6fcb39f2ccadafd4eb80c7791b36734e277945ec57e4c9e1afae3b08f3ee3f366bd2af02b4166e7cc7591c9d3388f7085e13df4f74b2ecd61ab6dee9215a17fce669cb9d7cc93750bd1a992f151f2f9cd02af0ccbce4379c9ad1a874b17865051966c30a912071185be37bf03fb44252de0a93dc52187b0568a76c53a51fe0e9145f3752493ffbf2e3be8eaa1a33a4c7e1949b709329a2f4ff73a5de399ce0151a7c0f8cb85dfa78d7fb6c54790ae766a4448cea147b99fbd3c563aa8b9cac0f66d1bf01c2b03e6423cd397670ab9ab300596aca05c4eb32007723eab89199cc49f31364dbc36a734e1f229897e328d069530acad0b731207d08a602be6ab74b384d556a6a7956fe8c3d71beb72ab0f611659cf97b5e0fbb8866915b67998273abf3cd143cb2e7bbc6c70fd57ca5877c2db75f368234ea0137e7e6f52cebcf288fa444d1c7c089f98ab662b9bb5857a66caab8a24e1294ac18fbea60acb3a737924d645bd2e49406fcc465477af70a8bb943606850b2edaadab29acd60acaf34e08d4ed993d18857e0aaf54f0ff6757f7229d22e1d53ab335393560ee6d5aeeab0f9704865e7c93134d5bdd0256e93319ba631fde69349ff9a3f3e504a05656b39eaff412e7bcb3157f8d4708c75f39563255fa9f77ef91a49b1a82cf4e4d078735d5fc2384af40b5f07c40cb5007870c37a50425e54153b5f16c8d46398fe7bdb8d3153f5aa839d747f2754596a31063e8ec1083194bede564460e5e7835418b5fe5854e9a83a47a28afb953cec69a7cd40f9a9ae0f80d4e126a3e1c2a4516744784ecc15de38e7b7ba8a0c2d98028ae8d10c2e45637a02c3c34e8ade29e3b771a69d755ffdf7703ce06505971001deb0d829cc30acd633a537cd6925ba2d665e6846147ebf3fa503e74fde4657b50d48bfccc39c80b7931ecf9426a917f35be94dc75ee744a4684dfcc26b2e06727f05b1b63d2c9d846402ed4a6f588a73ac985eb0d31e67bba5e3062f7e048c684375ddb75159998123d8b45c89906d5f07e4aa77a8d9e371ecc8c85bb260a9acc20d0ecde2fbdb4fe38ef4d60d99952592ecd2f7189058ccfd26bb7b41ef5aff92c0587755df8f
966762d829b2f1056997d7fd9083c87008e29b9fbad1917236546220b86e29c4da3abed665a426c6a8b39ae0b5c56d1882dee413cf4bd4fb84baca4f331f8356feb5f06dcd4876d4d34ad83e3b66a79c51f6304581f5b58f10052d0e9018d35fb97fe6de3bccaf2993add27dfc667f2858acfab50d4a1ead364b1dbdaed85968c0779ceadc7cba158044a18d819484167a424971794f8f00ce5ff691e005718be0bd54385d6f2ed54e04cc6a1d9911b0a471287824c55653163ce008b71195fc1286588dc3801aa3c148c0c4a75b22d1f171cede035c7005ecd131c6d894fc8516d5702ed742980f846dcb8d5723dac51c84c93cd4fb7c3cd6c1492ec0edabda58672c651bb54754b6397dcf9995d19ab0ee503758a628f4aadcbd37978802c19a42a443ccbfb661fab9a1b1f2e0b91dd0293282d32ab4af477c2dfb2314297243d1f5bcf822e4c37750480ba328ca06f4733517b216a2a7eb11d999367d0b567aa84aa12b69790997d5f3756379b11d975c0ae26368b908c160fecc179a760ffd49c3ae96dfee60147ac35c89b4b69240882226f70a04cca06915adbbd6f7fe194e05880c476962b84ff694074ff3790c2d9dc93df48f30a40eecf6365e35f7f459a8f5d15a7f73c964a5361d35d0d5a1166da86c19e7cfb109f89d8833dbda7500316304bc746361d62ef925cfd990ed64d24289f5590a8e8a1f71ba83c15f291f62039c2d85ffd96f1ffa01df4557703b6d4bd31041af983c073510f33f52ce99a2acc0f913880c8f8734d0716b84ae531b0ff62d696323f7c87fa4eecbf9015f2e18806ccfda32344f1513b36cf66f7a9d60fee77de46637b8cf5172a8507e575b5edab32c1c3fb30f6a2d0efd044916480572bcebbf6df9f6bbd39d3e8b48463270969a2f51bc6c41a49184a2f8ded15d82ea6bcb632aeec9f67c0d1bb54840e5ef6f6deee81778975104537359f45983978db1d68967d55e86acea9facf8f2fbd985dd1e22de868ce95ca5769e94ff0eb63fe007b5ced722441e669e914d4df14a15262f680452978a221c5a461cce81baa283bd32b368780f0ea75bca5e319d3fa1b8da971f4af42d1adc4465d0519796d8f2809c2cc967d70ba7db57ddd8c920dbedc412f868f0be8043d520c49d77a60930ff00593e566fd9ccf6825a2e71e2428e3cedc0ef852f937fa3d87dca9c03040ca9969b7b630002bf9db55d8bdfa926463c341de3f74255230d693ece9eb404f3a7c6e8fcfaa6f22fc08756ed5a0"}) (rerun: 64) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000100c80)={0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(0xffffffffffffffff, 0xd000943d, &(0x7f0000100e80)={0x100000000, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x9, "7a57db910fce71"}) (async, rerun: 64) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000101e80)={0x0, ""/256, 0x0}) (async, rerun: 64) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000102080)={0x0, 0x0, "03a6672b8e2bcb72a67614f13f14ce369ac99db8243d792729d2a550f4c0729b6935ee28f908ac80b6bfd41c1fd5da4ba3ccc82f6cf7b329245d569142366eb11f85b4b5ba27574e80f50fbc358899cbcf6900701e2e5414c684cc46df0dadd52684d087e5879baa735ed64ba2ac024f5bd62a3af2c3ec2cf40395a262832aa4b211707b47c3ea4c69e61da79a24b2ddb0147aa4635bc4856db16d5487786abef21422dfcaa0bc93a18e03740f6cad74a09d1b233f9b6306e867681bf46548d2f49fb5eff96af6361c8ddd966bfbbc07515f3c7dfeb8da82c5da8951419a3e44f96e258f027691149dfd9834327e0ab153c25a82d2095a8c343e79b62472e5be", 
"dcf7f91087d08c1880567dc5ad588c061dcd36b1d65163a806d6e09c0347c281b515518084f751020726a4dc16b7c6441a790da4fcbd6d150c1ef1d2fc4c67eda1d640bcd410590211c3b17ca0bfd73bdb8a3c2e0dc18f1b94f590d3b1bebf4e17c0d863294994f8c9dc5c674f4ef39a0dde042a579b56fe52b8d189ee62c425f3ad72213caa6e3ed93f5b443e55fe4fa2f5bd551076fd3129fda3610033ef88f993c92c13c15a2680865473854a572d35edadd645655186bd03bd1c88fc50c726269438fa949a22b5eadb2a235cefa91efd6d50ce730405265dee9c2abc5c9df1ef031a91d0c06e91f00a35db55581e0607e51efad4b249db3dd179b72b2b3575070bca9b3f6ab03ae0ff36415de799e9baa6dd906177872d809210f901fb53aa30056b5ac253aab1319e63083020d7653c271217117cbb2949b292e4931783131897f402c3895096407d0326a45a7f58e4f5bbed105a0b078fd91a80462603ca0756afd484d6815aa8c3c76e7a5c77735539ff8cfbefe4ed894ef8d1853d1b93168a84e1166f721d28921c2cbb2af492eb619c469a00008110ae0c64c309b40aca4c0cd50685ba67b2976f306a335fa628e8def2978da0068d342a9a091adf13bc88574aa33b9d2869060c785f268bb9e912c02dfe293c84391480dd34cd6bf0e2e8babab590ac9f4b10cc4d3f3600014cb38988423d15f5095bc828f07737e9a7c378bf89d7275948eb451a54474f74116a12b302a886a5a028d12ae112d69354b0dcfa1e3f0df2283d9962593d7851620b8730354ba5ec4b6fe25d7656afdadccd3bb015aab1eda85364e5a6ee99551c732328413d407508263b5f47cc367651f47c8b7929da62855487720c4c43d100b82c00382cdc573a45c70e31fc118dc4d6bef7defef60a926665b097c95794a1ba87414e2b7b900ffa8f6994a9cccad2a835ac9f9c12c3328c4ecc3cffe43e46f959cd652119aff786482eefb84b1f7e8b11288690567d62f484798ea428bd17e7dff3a0a18e56faf2f56f3d851a382c0b4e6a9b3d027ce07e53d5db847060ae37f830b7e7baa943ac10ff3cc73094cb78c64ae368be9c4a7de0127f1eac24977e08de110f942b166573416b1b6d75084a65a64d255d7181cb4b2e1a8a31d400a57f50ff23666be1c56856bef62e68557d9c58f1a3dd95f6814b37b5955db6c0fac80f625f85eebf220a9011db64a061e70e6393f27820743f679947e1b6f01d2f85de0201565bf5964e886d5ff774d66b13ee53e3de0df69d4df82e09624eb4d6eed074992740bd244af9c71adc702c95bd4bcb9fc2bc0e122034c31302b0bf8d954607aba4a19d4c0c95f23fc9eb95f6cddea3e67bc272bff4c38a672ba1597e180d176b7de0cc5bc609b6a9373e489bc8f66313bfb6e9161fb19a31170599c851e96b9bbe132bede216de8f8e3aadba0dfeb5714e6c40b3dd7a339c6d587ca96b5ed7846f4bd17518ea2d5e3169aceec8ac1aae38fd3123ddd368f6d55788c97764cf8aaa968601a880fb857a14bcba5f34e0abc3b17bad12a78cbff77b1bde05df960799e6f1bfa93bafec7f047a39ba51e789b8cdd9043c5fd7614f750b739a110b1a971848466948ac1d740aa1fa2aa31d10f37f988ef8a7c5e6b1597f5e9c483af9cf297dd8fac44edde43f1cf087047a2587d093220af0e76070f9e450befe880b15cbe34a726801fa8a53a53c180f6fe20d29f624bf19e13611c9a4c1f6060a803eb5073acd25c09bf5f0771875f70b0e760f46310b5dd20e3ed9f9cfdd1722dfa4cfb0787fb35a67fce9adcf74f1857d5ff19f79b91e15c3ced5330788731fcf2046f289e2cd337ebcece48ed2f2ee2e01d24055eea68e551c49518ce2694da80432f027931b58822e0162d73412a1d2078a14d9d783046ce6294cb5eb161b9a55075e8bac4f6fbd53c3f4e228e94854a3e464d1d129595c1016828317cc617cb4cf324d46a77b45d658c07019e836698b19e4947583d3ddf175b0204a29c6847bd44400e5b79ef44e06d71cb68dec188fc6abc810af5aac94a43ae400e963001347f29eb21ccc4b00d1e0f3b352f0002b8a0a4ae3b949b7eba7832a1b1b10c53d02bf44ac2c78d8aae735b8b04ae9e88e34535532fb2f22eaaac766e8e990334e3434912bd278af7e6a6fb61aa9b7e49cb2e1d1ad43c75c18bb7643157c9c291efc3dac0eab5edfaf63cc01e52b599b929305e74c6d345e1f156afc9cea5e6c0ecd2d065ea550c69464e51982457c0d5fed490fd82d4c6f88c8c9b4c09fae60e53278f7efb51393a694e359b4a187690795bb3b06dfebdd15d06c6040006db79f75597dbcfe888bc5bd044b2f317a9a79e6f21020ae9e509c5f2137d4e973e000111a8c1571094df1ae2f0f542ad58a35f68157a87276967069e4ddfc4c9586fe80855403c09bd9b10fddf436e8731cceed4b2efe897d1bc10b457b04bf6c881973d073938fd7bec18ca1adce392c0444d691b8eb72132a322a
46d100eb43b9120b0d4d1980d91361069a4e6a620e1803e2563e811333e8a863ba2db5a3b4ccaf7ffd543aae3d51b0bdf88f53845eaa531521fdc5f0e26fa85325c208efac985099cdd20d0c092a4f972ddff3a7f8d47c54ff396330b38e9f8d33c331a52420dc1066c9c33295495835973ea5461d0d5185ac31160579e169da9b6cccd371a8b0fa1f9d08395ea7c9f95e3e522f2e6e89dfca48234a54a4d4164ae6b47dda7fcc6a0d852b7831cc51ff36554a76bb7a6b15eaf11019ddf0773e631b266fc52a9194822ea6172fca59644c0cc644a0386a6352567e5d1cacef3393f4c1976427eb013a91d679cf5473817c5e633b28f8a1fd0845f7666dc83372adebd794bda7d888d6766f8a6761300947fd5fed03d27fbdd31efd732d6db79cea9b37cd63225ff581085b4e5f33b989566e80a5a4df56bac9e10e846528f833e814ab644a9cad1259cd2a3ec98c57de0bd60df1d83acca4eeffce3c2ff3d416fd16c984db4708b0e52a0cdbe032f80e444e6ce889c0d3bee1140c8a8fad05cf79adba416f3c0a8f31badd74e4284535db8098266f033bb5e16b0b3d987ebd33f6fa28b09b3c3bb7e528ed7410e808b930cd2c308b396d1679e6b378e8b9951ab3f6f5f8d7a1ade30cdaa723ad84845b9717bcaed9ccc04dd0880e92f4449ac6c58c0c33fc40adc3e00e5506e8e26af37fc2263b31dd0ede115b4074c96cb395b95148d61b2909bc8a1db600b45f7663810216ac1ff39e5ae07ee8e620d7cb14e64349f47302530412d7de994cde7c94912083abea3110ab247f92d869203efe045a2eed2ebd53272d5c71e54227b59ae1b64dd7688e723eca45403d68fbb79eaf0cc250abea45d9f10880a74f91f96f646779d79ddf295fef673e3fc38895b33cf9f120b4393fd1131cc34adb3f6bfd5fcc3119ecc2512b8cf6abbfe48d7c6af8e46f44bec09c0988ee781468e715eb13eaa2218517913bf04959a002af763b7c617250516849d4df3c1948c141ef2c59583d13062826591422e02105175e1fc764eb2d5789c7dfead9c16c39be3ef90417bb3ba1861f780b0458a9aabcf9b4da334000bbfe471c643dd471411b6be9eb04c70451873ef4ae9b3794f02b9dda7ea871dcd7b0872089869617b0793c1721ac87ea2dde66a55b9135fb463b120f8952b8e7d3d6334a3dfaf8612cdda2bb5d679656e5cfbb5b6c24c2deae76996386b94b0b7f411748b53acfa79f9805fe9b0ffac5232ba819f8ee7cad14072de40a9c35dcfafdd4aa674ab59d737d11ec21bc9f95ff0af309b221a02873d47354120b307bb599d5aaf9b735744c3c6b884005d5487e8b648b09562ba7083cfd1201dc6f7e688ec87765fc2539d2fabeba8ae849fc1125262be9ebc6a8f6c61168d9139aad4f10b85714f297c54a4a4c3a9f5f8b5a30923ea84bf61759eaf6f134f32dc85777dd3d05a0f8786992b37629810664cc392914c10218b6bc2bd6361ee0e0fd9fa5fe6b4acf56bee518748635f23b564ba898fe58a897188b5c4b3f01a42edf842adba147b3b1971dddb02f91d2af8ee9c5e1b544a57d6fc2cddf735c0ca53db280ef8c46782325773e24d91b659c666afe1d9cf7c79e20ef7e6badeb41e5036ec87730bbf1d687c88c919bbe048d471f0e0aa8f92afa231fbeb17c164750a9cd53523221662306e250286e550a5fe3990a27d7a0f039f49e7003db1b9502568132c465559489fd7a3801ab627a7ab30c5d92edb8dfa43ce9c268aefab47b09fe3551f63a47ac35a2e762c471340b2c8670a13c454ebfb17c52c7b5451e0fa3491bcd9cc697c30a7f4100e1041e964f4d6e09182e4ef9997b1908eff38bf457b63483fef3fe1c6178fe1a038f761df303c7cb58806daf77c3fc76934e99de69add57078e8316a0452499a4444186069810a4a53ad4c0c5b42dad76509c513af49e70dfd19827aefb0de39c95e60d3a98e39bee33ae1b0b715a2dbff58c4d7783d1b863d257e5eb6d169ba83cf98c9cf314c2f1c8b4c26d2cd049ea12510bc751bf64155e5f3225c4ffd616cd97ddda68f9d0fae9e462902b458ea8bf40e566b62da21708ddabfe093377501762a90c87aacdbdd5d441037128165e5b5ffaf57ae0a323d55039541af66d454d0b654110809295ba95be7d4a8765ab6d67c6587e120454b35fc8434fdb9be4ff64af45722d47f59a51dc26a12bb3e13e3e08d15b8db37807e2b74336b75d7c379dcd5f069db5bafe7dfc5c4583f0f82dda311ca431480236c720574c20e49b032ad243999401318c3df48c4ef493bcf3872e0b7e15e4750f4a5c476edf55100e71029e0e973fccf003239371a871457a93e5e724f8da409b2919f95cf892911f1931a2221d619dee6bd97ea121da9bb27fd784dfa0ced76cf3de444af7e343074d49ea63afcab86fb6363e52a2f70bd797fbb8f267e295526c26ba041d93206f09174c1697f7619382bfbae5ffee461425b2efe91f1dc96cf34ccd9c4a31d23ea434
7cf65b9b32b674f26b4ff61e2ea9b206021645b3b4bc1cd62ca25d1e4afc0365ffe9109f8e53b6ef122a40094dae5f9a1758ae639d5ed2f1a3da21467a18d5d949a724ac8ef6022675df5d424d277f4e5d326bad195f6931467acba0381b7e8d0973ae450c5722c2381c789c38ecf42516e5467dddab086e9c705ef2b40eecfc12e26ea77759fee3e3f8ff5e5448cdb68df146c71efcd1d93b135348aa4cec732623a1ae5247662096391461d77abcffcc8f7914e020f51c1ea33ad61edd48867f95745609bc2dc98271f87e43df58784e88e48fac509755213c363f8ce1e4c294ac9e5626436d67980421dd98acac80ab2ddefbc9c9796e1cc181a81b3fa0c2b5f4ac00d68e3f6069add634542"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000103080)={0x0, ""/256, 0x0, 0x0}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000103280)={0x0}) (async, rerun: 32) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000103480)={0x0, ""/256, 0x0, 0x0}) (async, rerun: 32) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(0xffffffffffffffff, 0xd000943d, &(0x7f0000103680)={0x3, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}], 0x2, "950382f6725b97"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000104680)={0x0, 0x0, "27e3677a225f0d61f4a2ecbdae356974b1365b4403d18be9ce490501493555c5f94283b32da8040469251e63aedcb74bdfea98cb136aa3496cedecdfdba273ec4c618416ef39ce95d65346d713deddce0b75d68ca0aced5458e53a73bbf68ae26cb887f5a1508dc13227a9eb29bc48c011efec8392df48cb999b14ded44b5377d5622cf5102b7371f1d0c6f9e6e3791284bd158bb28afcb859292f6f1d7e0191cf4f581fcb58c547bef487dcdcc64af315e300c27c394447b7af7d1b26c21856594ba5376e0f07731cebfa0fa721e00c21151e35f4c9917b7ece95c395fa1d614d9d9730c28a953f76a89a251d4ab46561187780f82c149077ade73b95cfdc9b", 
"66f76911dea4f206956abf0ab37aa7e6822074786b9e2b9739549dae6bac11028af657d3e6c81e865830db9edbfe2906097df7415bf913f31a38c29f544d179ba5cdcfa9a4279631ce30dc3c5162fd157ddd02a8c3f1d38217b1d26c5611fcc2955e4c68f4b422731539cdc9848d70f51dc52c4612238669300dca0b88bf80e908692a5e3569c5e0d81f1ffdd63c1fe0bf629334d829b9f431e7aa3823691326fded979f280fa5e48a20a469e23927dab2c56bed776054937e6ba6b6e1c4b8274685309345818392472091e4647326775b6a62a783d49f3d5a9db1ee46b9c7c61ff23f3db09eef4aebb5afa5d5eac643fb97fe04b201202ca6f7eb054fb1f35e97d1c431f6d2d1e53c8f82b76b058a0ff578bbc3bf227fb7a46b5ac88b2a5c76bfc281da60790f007e70a8d9dfe1313e96f7a70105e586d8c00e42ea1247a0dc6dac6e1b35a73354c2264d2044ae9600f62620a64dfcca66cd7528356334c97837ddeb649d66de90304c1b471d8d6d40c567e935363eba23bd4a8f92c3c7112423c5ca31b03742ea170cc516ea73574cfc730d73cb2355a5bc7a920c4a38c036cff931290a2b7090070fc57944983f9fc6a9daebf70e4a18a080ab87d59bc11c7b02532c1f9f0a3fbd3bb466c8d0b03bb926cefc2de0956167927409b9c79f1b8787ddd399539b98243e17c37203ce5278a9eef797d9ce5e6869b740742543e60e535d46c3a788fd086d41df1b33b1b5dbd0150ce9a64fec0bfea7d910b7aac27d057f2edfaa1c8f5fc64c67e9a049926a8853fc592e0fd296798d09277d796101208ce678ae80acf50bc2a84b2bbe21d599c0fe92e38f6ee99103e9fff1effcf85f38fe4b400260d18d7fdbaf0b1b4c98ac555702c9fe18e5cefb87f42e90f8361d3bcffd55c4125499e96b91ecce1ba11a07262bc42a5c22d9ac1964020efd1acc1cb19b615424d698b7e46e474d3e13d2a1f41addae8842da7b1b8b405ad5fe1312cfa019815f526d4a2f144690dee2a8496650a5083a4d88463d2649bab8a4e3850465f3c06d8087904b46d9a75e7e1bbbb5fd5348cc80ce763a2c8529cb3f104916073cbe587f0cd39a7a9cfff5ac7a40d4a7173a0768ae2423a3d35af83b0c645a8d6c38960d9de30c4c362f062ce0ad02037fe8772d10b59221b0676884aac6039b4978fa88451e63f3eec074f9ff243fe799da27a3e11012593e8a55f299cbb7d164c8ccfe6c496fef8a01e575b322f821cbf7a7cc2b550b060c82da0e1a87b5d112c97c888da7db68256c26efde1e41b5832e85209784fbb231c93bc810363dce5e18ed8370f9f0c7fa0204243a736aae65c72299fece1b1938c0851bff363e5af96eded55d2299a9f880203ce848490d5a3d3faae2000df7266ab3a3acdafa9d928f1f47129e31b07e95b62bb1575027bb987ce81393da8306107758af44c6e16ee450db4648bad36c25f0751afb412451555ed92a93a9353bb97bf8ffa65f73b256f8431d15ecf4530e131f0908913b22cea02982a79cd3ec52b68d0ab447569e180cb644d32796ef03e01272a15ddd69a7e1644085938937d4cfc75646e7e67096e294e4c02c450bf4662608a5b55e2151d4cba54659269a4d7eebf5e102a33e5a576e18933fab4f9eed636216e1e3e0e55ff61696cf58e2876d467bc496ef362f2390e33fcf2770529ed5890fc8205dcd027f865af9e2b7f7fa645d39beaab1c598b019da7d263484583270563908581154add0e416c739f0b73d78b8469fd26d91d9934d92fac5410bceabd205bfc8eeb2f9f32b0233bf7baf834863f2b17cdf6e159ff70222d0442349f9a45fa490f6262a5762d3fb2d41230388e99b5ba6b1a7cc235aa8efc5199b234265f6057beda381895a3c35ae0cb3f7a40d6e84fb904e27b9cded767a4bd0d148c0b19a9e6b99db2d7e00adbfbb42f05b89a8999bc227ed38daff2f882c8049082aedfe2177ff91b90764ff9f032f728a62322197b85e71aeb1929055a0795f98f8e5818cb5886823abb4f013b722972a66b22a5cdb6ea414798abd1956806d6abbac30c95dd305a1f6990eb1c64538a040e02de92acb6fc8f64984e39d6cae86555a8aa25991c40625e08aa80f9373b925274cd1d599e154ab4d3b345fdf955bb96dc9cd578d285d8f00c3c300091c04e8a064d8b67222f3e4f3fd36c3a0e9f6b31258799f796de3ba103ae2565f238038ff58126ecb844cf00bcc7b4e7e0c4cd1f7f699e65b8acd3151cf5a79bd48fd3d0389959b34b51fbb0bbaca6437654ff435291d787861ba9351f386f1a380160ea2207eef46df24e8526cd3b0315ceb9dadfaeffa9b5181e1103be8032580f0689e9f551aba1ca7a5fbb70eb322387b1c9e55ef37e1e1184b847fe30c2d8413e04e849a996bcc3b4b41a4f3f3c9953887431bd23d40af29a802c291e81dd7b351a11dc6acf2b7c5cd3ba40218ed4dde559f017ebef571b5221de3d16288619089f998e5f9c7aa5846d030299572f6
8a9b09083131cbaf7824042fc2a2a55b5af2a8a3f9f1e871682db7701662d5932bcd09fe32db35f87cecc6abf43128fd22b55dc31e1250934761e145bf3b88d07ab9405769661b9c6863b744bc338bf6c49271ca632bc0efd75f2b16116647691a03fccc78479528e122efca3679e880c941a5b1098b8e1759bcb62138aa3b9d2575b33216a51a9d41f8e37fadf6b3ba4687f854ab96c6bfac2ad1d554842767e6e76d198cf89b4477e3dba9c4c2abc646e9a199e53e5e9aaf05c2ffcab4de7645c57aedef3374984b3cf9016036e3251ee121dfddf92b788a29d78bef12322601a150c394c53dabbf2acddba8328a78b0117736b9dd2fa32348b372834bc12159750b854e6d61eb560d37e4e74b2d9c5c3cc566e4a0d5e02903e11d854310101607227201cdf68a67013b69375b9a76d40bfaf328fe4afdefc571aa4860dfae537d3fff1cb0d756b828ec895452ae41f5a105122d03487047b59feca39cc8e7af315c3e21f1531d7ed823ef171b824c51bb67d482ea6a880dd51f268ed663de33db9a1f5183f972d26af7987797cebbd624cec6a1e229678e4cc45ece2b03ff8b3e7d913b062094f0fdf92b4f12cc24a4e97003b5c1aeba00839da50efc28fabb7b13be125da0192e00f948f16e34f485e8e5f9dc361298809da8ccb29293cec2ccdc10d9794bad599a65c08f1492228ea041a5dd4fe2e451bae1dc8fca6b05944e66666d9bbe48eea7c8967fb1cfd502a8faa51eba399d3798b22ad7b1e166249f5798a3244fff882cc57305a31ac362b824e91ef384bc57a3a63e8124a878cb730d78b11e6f00200ca9420cd1c26f374c1624f9e45a2f9d8958151fc64939ddffaa3743f9b3c0944be5628e49f823da58d5552f0a62bc4b35483a51ecaead1a5bba99afb7e3e675d3801457b6d38694919fb831a11741d4b4afb6d400674875177c382cbd7e91ca469181925922c791fd08e1bcfbf5f8f41c140a91967be727a1b5e84b4aea90ddef4dd41ff67cf23c612d1d4aa175f9d6e42231f32c0b5a6a381afc9ff7d45390631d56c3654cda56bb2187a7a039f7d72b70ba45b67b780afcbace9e78935ac82c64a69f3b91b1ef35e74ba5cc366e4eb1a7934b5e27159a7c96ead8935285d930991261bad7ff47457e3591cf9cda2960b7b48538c91c65b27bb9ae5c9f43e6257f4e700b9c588a43a9c756e1c19249d37f2c8dcdf31ce542d164915bd0b13cd46d40614d63a575a2658ed719dadcc476686ba59e590e2afccad6232694cabe869e8095ec6282b8144ee297fab5d2ace67ec97537652603d491bc6808f8cf69c03e030983415dea7a29a93b752ec5476932341d5a40d1f9ed3b2ad2101874d0170b2b2614e3cda38e7538d3b5d3f766fd15dbaa83d5b0ced6807ea043134863cd7d9ac9c2e8e27e38febcdec85e62a055c2eb82c665d818d34c5c8ec83429f5d156bfcc11da20627bff6f8f4c3394dd66d272a2c8364d2d02f4faa945e5646e47f998017f52acfd80767321ea2cd32282fc4ca5f48999cc245658afdd7cf6748e2146c8185b58bca65759089141f0fec8020f19dc1131a6f0a916621429b03a116afedd5da2f5c5dfe8f01fcbf90749efff44f242b38744cd3b09b22718620889e96e0fc062bbc74fe6e9caa7e2ec6e64c420a43e642ef5c68d000007fad5de55288a9b38a69d721af88aa1c3389559c2d65dca52bc1e5d94b26da3ee35c2112dcbc17e157e1a2ace436b3435cc63aa19515d18d4c1fd285d2825efb69b48cbe6116d927d096a416d8eb220b784e8fc1283519a310a5df9fa712b795c5a29a931d65444a48df7d521b5095784c1284609dce85648f902c118644aba7add10832b3af2691ebb72ae5a1f3e9b1e3ebb499079edc50dd9bde4187c156af53ebc373fd8c0b2f2cedf55edabfef8e8d45421e44866fd449165547675f6d19cec091c626ea24aab8d00668e3c4ee398837861e1a4b55c04723d725bc9c0837250d338e30260ffc942fbcdc303ccd84f0141c3c03e6fa7d9dcc86e4308954569ebc812edb2352b4197ee719e8de3e1c29944a95d82b5588f548bdf447f8ae671747469c60337aa2df731c1c3238a867dcbfa397a98715fd824a817f4a56691c9ff8aea4999e7497a567a127740c020a588482e49b087a4f7a40d3ed229f6ba1887eea93a168dd5dc8ef4d823a4aca0a722a192db66f3af857686e5114b871a70f9923f523502fd39159c91375037ff8d71430e9906e4753fce89482fd9c081af67a289c72a42afadef58f6e3d45bdec858de12fa2b5a8617cbe7b3a2493ac62c12c92578a31478d765f4e053e604430430c96fefd5106b80f8bb035a89e594c8fdf1bf5b7aa535afb16fbde9ceebabaf457355342fa75588630e1f241bb2a9544676d0e40707880231996fc69c10268659e99114fdcc113d723ddf1bb5f0f80b73d455135c5f12fa85196bc8349e56e079bd64680bf57a5db53cfffcccd5b29ee0d38322e9a731c294ecf7f
dde700e74b56defaf693e25aea94b419f7741c71b7b679d57d34bbf4b941627365dcabaf2ade0b07f98e0df91fbec898a90479bd66a34cf5e9ce4092da0a055297c80bce7d4a3ac91af4dd6be363dfab95d40ed9bd53c2a200798627ea910f6d7e1f978069863b50c19c573ba0be952670e4db738f2010fe84f70ff130fa00868aba1be885458d30261f95c72d9270c449ea07f2015b4f5aae13e2f26f3a71522992504d384468239493fb30f6d95883aff38e0e96c7db32ef4e1992368b8fbca22139a7ea8685a184b044b1968850fe4bc6dad1390764bffb747779f3dd9cc1b6a98897b83d5b6f9c9b9648538f8753588b710a7b3b1a066c80bbebd59830002c9a425b3e0a493c19290800e8a"}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000105680)={0x0, 0x0, "744fc98512ef35632b74044d9f0aa647a109bd3f720e020fe82657832f04ac8412d37276beb813fc4ae5b411609f3bf20c2404025642f87a82ce309e5cd388442ea5342e7ce658ae1e00246c61b5cb989e72809b53a6f88ca10e18a518841b6399168280a719280fe7a6506481b5b860c8e9bce82a11a708f3a7b8786f21a24f9ecf7d22aa5c367f56e0d0c01c59653c50ddce45c35ac09e052d0aba4b87994ff8b57e432ff49bfb6a9a14c37bc8963910c5fef31ec1227dafc77fbd77a04db69ce780e21783678d9498411de51b06154182ca6a6140cb9291cbc5e18d720cb3da20da8cc415e81c5b80d8097e7fcde8f4a31402f2aa1109c03cf729bf05cd24", "83205eef629aba17471293e42d97546dd1397bda378823d6133e66ecfa40fc1f6cac69c31822da88331be1e3e3233ac3ea8e0cfbd941480fb3c9c81507ea88b02eb47b189ba57f1451d6d4dd9e098729e00e4a20f78b5cf91c5ac0fcb4e80eb97477f09e6ceb978351caf24ad9e1d4fcd4cc54da7ebaf3f3699a8dfe6cd0c4232d9ead083f83b9e9fd891de827fb3a79b833f93aede8abd122be38f205d71f41f8c3dfa9c1e0ffc75ff4311f92fea58e5e918d34014ef45486b1ac418e53246a45c6879f9539f07fc3da8c8050f814630f85f6f4b14c89ed02e0e923edfd395a76292562533ef373d5b59513035ab7c39de001189236d8e7dc1c98e717509ef3bfcc47cac73f296ac92dde5c1ce4771d1797c1ee2c5e2a3ce9e2a354c4007b130d35460b2b2fd138ed3f6e1956def838dbdb4f0bf2add2bb237d51d171a123705d4721b423cc1b5d29d0b005377fc85f027ec1f7c95b3a2c6397cae951eed30d3cd044ad4b6c90a9982700079dbda64e2c8279ff8792421bb35676765dad534aba8cdf5125646f055409d6800a91d5c77a6f97288b0eb45846d0116217cc3068f59f8ba759ee0d1833d78ad1907757bb959ebc6c4acff9a5e0e30f95b9a881263677b99c1ca79e59d63c6a0c0977100d94576e8f933df168efe57cdc1e1f76370e6bbc754a921d4a095c452674cbcb7f1dd1eac3678048aebe5d4ee50cb8d8fbf3927cfc822c4b9c62a5b3d2f8dc4f921280efc4798fbfe6e16a7a95f804655a349aff4e83c59f472204e8b89646cae6744e01d99df7f5b2c55f4c22f377b985fdf1c36b677286db3aa34b39c615b5256307592a7560c3441c2e9ed460c2174c80606d23918473813ef77bf880a65ca446b8aa7b356e43a3e8ff13fb489add12fa96459222611b763694395a9a98713ed9c687b9926e7322cf1d1edd6a9341037bf58832cd0a23052e1baeb06c9ff610763fbf8333b5e72972f3d33eb99f31cb32853c2858e2534b6fd22ee9768ea555f38b5238570f9b08f3643ad7ddce9b3dab4160ad134c26eff10dcea4d758992e6a07a6aa5df6633f08424e4bfd930cc342fd7aaba46748a7a2f07de1e4b146378c80c77d4a7781d24e0ee741251c469559992085fe690f83c06eeaca8e0c6874e02b8eb09a0db2a2280c28d6b6d0a0410f9ab05417906aca685a734f7daa3a5665fe0956a2405a62f8d38b628a650974f2aad120699eca8d72fb8bbab9d8df2ee4414f9a82bbc15c63e14c3ff9fcb969d98dc3d6aa07a630e3571869d940d3b9ee4aa2373ff41d3ef48866aafcbd08ce86fb7bab86aecb9cf37f23565d33f243823dec518e0737879df904ff9e3fac32e1bbed0befc35f805072fb71fc5fee5e4dab85c4bb9afd06a1e6488c96cad860e1d2dc789cc715f011c9e44568b29516ad426aebbcc7a46dbf036fe67108c08ecef6afff986b0e96169313dfc2544ea262ff6ac57a9e0c288bb285378965d77caf451de325e9d0580eb91bf1b93529dbecc729b3c2f0d2fe683d4b09ea044476df9ca0d942b757bcfe6e329ca1814fbe13f95e17d5603e2828b7407f0755a52de9e1fa8c83edddbc08aba5833edaf81bf5e16f4ab02885acf7ed9083be1e3314c8e38fd35f2203e3614c9ae285c3e22eb7db6660cd9fb96c8f2f790be092c5d1abb1b509aac5d058e13fc22dbf
b1e26bd6df26f10c4af2e8cb91548419748073a4f0617ed60189767e19f184a6fbe4f3ea8c36237a3266487f646d430cda72e9f4933ea726d610349bda41e2240808b227698c6ee710e9e40d30a74fd8a9b5110b357537f6e0cdd3886db3db85671937ce8d7ae3b58095808aead1cd7ff42081289f27c2a7cee97bfaba9cfbc52fbacb08027e74f3c1f2c4bc4990f8bf5573d51d4d753c00e9dfc30b37e122628333af7d8647fecfa6f760fe4cb92f30ae4592bb8ea4037e1e08e118a69824304e8d32aeb05f450305bf2bcbf0edcb9f857051b515666db4bfab451be1b71f902692133f56932ae012c42cef892250b22605f1c967307ef1386a21c7027ef6b61be92acfb508574a0428c82b6da14d92c826d516a8008f8fd6d765e4564c0d5cf8a264b5f38ae33a975eed57ca58ea815f64be81ae62e12a2163499f98ce4783e093603396ca7e9b431404bc01853d9f2cd68faea315121cb2addd6431870483cbed374ff887033f2d676acc8c6d521b329afaf3bb9969a61500e5f2855bce2c6d526e2dad63e35da4d48b62434632c59e8f9b9de47a1b3c08b1048da18ff8aa556b211787383203a4569ec77b1e522a231201a52d85df69dd3d0df3112c7c847f50a79126531fa18b4793949ddd228908cb3d78cb93842fd45dcde4e7e1a700c9615c19dec9295d86bd148366eba223d49d71b0e02fd6e237cec29a2226c3d201e419eb277f05de306093d80782864c5c49d30cca418b4527c13174d1f0e0278ae41cb785f2b0b81583ce5275da0a2b8f0d6b4745c6ea053d7abf39f2addeae638e1c99e80f3081e48df27918a4aed948b13f11e9a075446b9cf6437f2e45ef8298ad6ed494215c0f9b796eb9b5a079033f9128b0765d04c9a3e583bd11134dff25858629df139c0d5d7f4a60c868a89f5747016b5589b96ed96f66a3256a3ace50b7052952612b25edbf3534f068a2606777568cbf2f12d177df0c7b57a3b9435ae08f20103b5ffe5ec8f8c5deab70a5007846b9e9fbed8b07424e3099a25143d3d33ff64087452198191774e0a1fc749e7ea98e1bc9123c4ac94c64894d2556831af7f8deff7dd998cc0f545156711894a0a78f2afe24e6127939712cb3d4da8ef7836c10459104528d22619c906697dc99311df3a76f645b9fb00a946a31bda34cc1db70a7fdca696a495c436004b4555783c9660168eb0654f5f70f5acac37380f41a41ea796c34c863e5ccc10d1657ae556e6423f833747fcfbbc611991b67f485b294b0f563b6fbb0dca89273b2bda634e90b0a34ff57fc1818a74c483cff8addcce9f1ccce729571ebe1c597115dfe5a1a3c6f14d80352f0bc3973338f98b66fde90fe5c793b436b7acd72d5543e4e9c2889b75060e4514d53930b19c3132d26aa9c469fc216698594635f40e1df71b7ba117c8ff62526baa66c8af76795b605e2c01f6d68db11446ab6b470375829487dd848ddbae78bde3cd0d6d5c2c025c6fbafcc896b7aa6171693b5bca01706c217f03f1aeed2377403099d59c7f4f578fb46502d2d79de069bacb73fce2f542cf1b252dfcb8d048c6f201662e9cd407f03cf4532c97edf5c787e5cb71396804b36a686123b9ea89953c6f44bf2320e33481f1c387c02c7f4a9c6f2a1a787af3a4c9d9aee6f23f0ee5ebb56f501ce578e43198ceec2f2d015bcc68195f97b11747e412d5d69c5b614a030d0472fbecbd3ee3c987eb051720ba55db2767321c067e63c95c7005fbf017d8d1b71fbd9c4777b3c26c4ade02fb116bc3cab700a15987b3356b479317c0dabff9e2e9198e49b0a1c3da27db65a5bab0e0eeb5fdfec43c411d3f0e548e88c9b0d70f8cb18e74a58b838473b83df51bde8417bd53b07dfaff69c5791aec8d7b0b9e5f8907356fd039e3661b207e9ab54a50511f8459e8393c4598d77f7c5428fe9d20ec2e9274c0bf12f2c77989c58dec6922ff68974cfaa01f32c4442b850882b5236460a1aec5b7b378a65c8c00a36e42c8b288b642c27ef8e80941bb3d18814f7b045f4488baaf39a6458da6c4c31bfc00b4b595220f5458278f416e8c824179144097a6f2bb569bb9a9131289ce1613ee79ede143a371234ccf32f212dd2b7f7cede67a673fb9731f87efc109caa0f00ce2fa10dad838571251caaf84758630478aafbe8e1816c0024270ea368ef7096af8b90f2fed7793016a5cfa617951e429e83154ecbd3c36e4d2c29b4625a0ee3c43619b410d40f09982c8dd033e5c901058f0261b17bf6965e6dfa335d3a10f9c6d8022b110f7f2e2d87d9c29018df9d6da7ae4b8144b232c269893c2ff9435a0a66ec31a52454ceea140377d929b59b54ef1c4f6f7f768b5fa7eb59cfe33de7f24d4dcd31dd9b96255c9c3cc413d33df4801d035f73771cbcfa72214f7e4053c6df99213a3efdd266f15c67dc6c4821a9d88bd855ef3a774bf7d373604680cea4db89c5a5d13c7bf63839cb7c62d5ad3019ad115c59534bb0cfba24cbf3c4599e558
ea2f2119686a11b6eea6f570105029c863051c70f743318ecbfb40edaf8fef3bb706623aec969e9de234399c0b7c66e5763e03a70f8e06817db4c744895b5bf447d3507688b6fadeb2b31ef7680de9efa91304dfad4b6785b3ee7ef2174720fa49bdde4d1ab0f90ea08de5cd83e8b5ebc9d352d4c21abe3e2346d9f27cafc4c94f10f9217a3170a5116d73d693c25234e368a5d5c9b0a6df35434c16a31718ff7fbff651bcd1c0d6fc20a88c8367866a44ad1d014fd482d4648916a19e535391fa19723cc0594b3bfb5b138514e16b007e62c2c6e2949ff8eb546a2fe7717334c5b8357e367bc8daf24fb8b99b614aff676212109c6e0bef94401608f61d5bc7dda384ce8d4dfcc828cacdfeb749f929e1a4ce5abfade7b88ba1c2e180c505cefa726964987684a00b5e4cb9f1f017863e626cfde485b9b127a04a5d7a756a483929af26f3dec108944ad046b17741d66b779cd3860096cbe88d540f044af80aba814b6be20f06170a2974af49a1e6ef8851c4ae4f6d21e3453f74ff388c91b51bb75af5e62c77b8af9ca8be2c54e129b5fc4d30165cf41beb371e6212e723fd75e0a9a66f4c7973e2739fec26fc074f1a877f6fc7a3b1741ccfd1a828f0e7a91b55950fe418422efd6607007878ce385a0e54a558e6f1defefaef5925bb403e2ac0a80fa593e4d7667defc3e53673483bcdaf9e3420ea7e77e0839738b04617d2754bcb72e237dfbbe57d59e83e1f2c0dc4db068a4ce51eed10aec33b25438b3d95849b812ec3be36427654fc2e4db354597600e947e3cbd60fb649a43fb8b3e08b8624bd48195678fddda5208c004b6dfebee3d200ca6345538725d0a6c02bf58c80f53d97c77562de0d646f3e136b1fd6f242af138dd2025b07142f38ddade5ea8cb144edfb14178ecb806b9f7217f31f6e6a83ecade57d66e12219785928fd1662d5e7112bb6cefa3e76600cc6dd008ab056e1d6d23d626175b73357f640fcf8d48a10b0b26201f29d6580dedcb18c7e3a672aa2467575c76873b03d320d59533fded6d98c033c72c1831b77e02396d09671c05c732a1ab28492dafe25e2f8222f6c700507e6af106d16fecf1a9ac4ef868b9005c6ac58c2871bad4bd019357ff1ab6fd588e069f095b3b49625b1fe005c67302117bf0290726dc67f53137b89c7ac7ca222e60b8ec941baff3f1025f"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000106680)={0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000106880)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000106a80)={0x0, 0x0, "1fec852be121b96a5fbfe99da5a4bc3ecef6691cfa4d77f71d6a598f68a100e6c424b804fdffcd7cd10fc151aee6f7f42b1c2809bfca3b557082ab336f0e4f3c7cc4e7135d909b00b6dce17430ee2c352104ac341389c4d9269316af98e9d9ec3616222e1c2829ea05aaffa48a7238f97b45a4ab24b3a4a3ee1d243e3c29178451051e1e96791c18ca65dfbc15980c2481288eb4995b415611f1e3085377f71d6794bbe7041d73bfb1e8e163f50a41c7b7992dc8a23f30002f44edfee88a81590872129892271fa08391411563f701eb3851bdb5e0d0d1421524bc8213d2f6bc1f2ff7409e062388947df404382885fddc759601785d436583183c4da22190b6", 
"a418ced7323880a007ec84d6c85e09be9ed2f1b170149d3e7790d720697f6a5bb75de6dd328b05e30c0931932734e811b223f4e35cfde37f72e1281ac47243db7efd68cd782600571ebec44a9dfd6e9f2ebc06acc2d7f60da331dd11206694725d8329b7633cbc968d0e77c508394bdc40df7e863ca053cf18b5f0bdb0514e87055037d66bd8d3b739f73845bc073752b025a471ec9c376960f8ec7580a8149c348a198649d1e408a8620aee1331783d228235beef6fb22e67c191244117634bc9b3fbb63a7a1a674c0ff52781ce454364730eeb097062d56ef93636216eda2d1b82ec243f34725d82c19b521db8f844129401f9fc173055cd9ece6f68a36f601e31d8276b12ce2a1f86b31ed95573d1ca6a310141f31d58795cfa393fbbbbd976a312cc54d70ee9a718105e4d580d333d0cf657dcf1bf64b595a1875dd81841f4b80d285a49440fabe953bf3d31d78c638393f75a8d34a826088a336a89839a814f75710b81c193b247ed1bdf17d0b16caafb20d4ce964bff2f0f5d892df150da40acfa584b62a57efeace5e8cd795847cf8c8c46ee840726484774c181ee4ab040c0a428c87dab994f242196855448afbe4ff2acdde893a1913cf7252954a6594bac20c39a6f56b8d9f98b9f86bc0e6f7d589ea5579f59a376267a7986ed63dade8f26b66e4f1feb07065bb84afc8fc676c4423631177bb0f9b8b8bab174df493008c0abe5f9944ad7e0962602d137035d62d957b000cd12aa080d264b670c63f289d184d23532bb705ee572917870b9467454e1789491fc790fcc7e7d4f4dd143307eeb5ae5c49cde1b9f64d36f19b952fb73ac738691b2cc7909b929e97ca5325bc62fda2e3c95aadf52927348216d7200e586a9bf38248a67579b4a73880e52882eacdab2a25530c23691723ea473ce2408f83ccbd486a61871572ca8f855af9c04b24645a2c5eb92e47230201d06b7ed837c1ed520dd776045882bf7b1ccd875b32e687476a21856453070069586d8d2e8c01769870ca997aadced0dc15fa653c477790fd851dcbce381b0d99d897e507cc2edd7b017d77d8603cde6311adffc0d131a39bfdebbe8ccea33ee96627feef8305d136155dbfc21ae894d9aebfe149c5371184ddd08d289db6b8fcc7f72dd9242e7a8a42cf2a77f534cbf94f3a56bb48854d6165298cc2c8576c262e2d23d2c961cafb3626b4d8343d3338d4b917967857a9da61866eb535e2dfab4e6d6339cb94c33a8988fe41f0d4d4dbfc57f3f54bd9492675134e9b5afa97db09404a9641f6c3f6738f7b71f0faab4335c81f77f513c4c77bdd96556a32eb80c7ce9a6f792e7ef6cab4f1ffc6ef5cba16c7c29f9043db28867ca93405c59dc635cc8f94a82c4b473a5b176e07222c1e0d632d32a3423bce785b88e7f7856465591eb3399b2fb80362198517e656ffb89de49237844409971ff3bf9140b581596ef69d1a9ebfcaeda511bb20df53fd948e20f9fb2929fb3cc3b4f679ba86a98bc17449db9f3f2cabbc303824c0fc9cf69aa0e999401920694ab324e60c950dc850ab7e7a2e5a0d0860f0b5187def9550b92eb93e81190b0f98b43017486140a37ebad6ad0178648b0502073119fc6395ea66fa8b86eef7fef18f67556b186be06063ad420cd51258fe061016733fff74ff4c48395bf0531db4653961cf27ec36d8557f4a3f3d8cefe4337e21f7f8cab5e3ed7afd5d2c02484178ba2b170b71738146d61afa2c376d712cf212f8acfe1ebb6e4f6a62534670aa368e70853b1c634affa3bcb6600f7428e9e2ae5ab214f9b042ef0e6dff89e4621b47dc3ae868c0241cccc9f971a6076e96081823f020ae26f651d00115f7250f3a861e37efbd46020b4f760cafc26c075d2fa4489b62cf7636b08143203cb1c987fe703fce86658e8fa8b5c81d8421d6c29b9512db081f66507c5fbbc1e60ae42f8d73bbacd90b88a80e4c44625f106d58764affed3aef636ff4abe422fd325b823a94f7d2e92b8d18402eba01794abe1c076c9a3701ab2ac71ee5d75a3677727ad8d86ddf561d92232f01ab16eb8934693d7d9ef260b0ffffebe7a1e907ee7642f8a414c7a419135727369220568bc13fbaaafffbe76ddb853c21f43342bc80bfc85d1d8088075ef60491612649c9f015b87f6b207e422b6a67a8d08b377b41bf3f4cd14642dcc84ab2582cece644a9640b1ecb97a27f97e6ac5d1dd49a3c8417ca0dddb1c6666aa68b0a0c5c6d217016988d3063fdcaa93f3de2c8386e4ac790352729bcf0bbde39704bb10b339edb4e9fd4c73f87245a20b73cf9c1b4a4159653dc4a94346716036b1b0f76bf9b1caf406cd7a941572c72a337763faf12c563c230d1a9790c63ad3b83eb10deb97293b6fb9184d983dc76a18037da6ab40de2f2140197ff625699414d4fd452a778278346c803301f3c5b61a02aa626843d748075dedfa5ef6bd6acfd6006741cf07d8412a0786d9e701dd2be258a1f481b389525b36bdc47c18
8c748f7068c30b290d63b8d0cf58753973f6091b433fe2f397777a7576756a7aaa4a6d65d7b1bbecfe13745abfc36ca33381f843f2527bd538f222a3d1b23c67e01a68955420b69763ab2badbc071d4ba10ed5d42dd0757fe971dc85cd748ca5cebf452c7ac9724aa3e9fe93ea6b0febd47de3b4aef95aa131adacd200fab3126faa0aeafc9233a1dae87dc7b326b5d67e37c30f69bbed592bd884c84ff43626bf8afd738aba67ff5081adc3468ef43143e8eb417d298317103f6ca17fa6803752c69adcbfc4186997814ef09b765b2d2e90d1f25ef46c6f5ab4ef3910eb8c74a97a61279c683dca3bf806319cf094f607281106d48dfaf2b127be59993eaa6b03c9cbfaed4dd6422952f6cc507071ee06e9ea33d7e0ebb9b79441ae31a97aa72a8ea877d4d78676ca7166a180857569392568fee3be16ea77405fee9b50f8df179ca3feabaf66777fe8f1e01aaf6a9029ff5e9cc01c2cb21d3484332e31f77d95b8213971cdd2d007e024e469f02a321f5cc8f8a42fe5663f71fd63c1b6efba10574cb119f88f9b521e6626fafa791ed74088548dc5d4af42b8c3992d1ad01e861efc89e281bd56e2fedfc4a5fa872a05eb0d845736336b52c31c7dce03728c36fff99e7a6add6d155db36414c4488cab2262b626001d7da65c2b91370cd792dedf883cec0fbe6175272f834f82b3b6a720dd5b19c9be7aee49129dc1632a68af45da1ef26bb636626a3f0cf1d7061484064672a62d2c06636a6b76c276d90502dae72f5d4f6364ee231475b0246f31c22c380ae017aecf7e275f60ff7743df380f25d03fb2c6ee2ba148e5c78352d60bfaf545f32d62efcadb1eafa596c7f704b96fc760cc463a16d505548ba36b6b739ed4657853c0694782559a59d46ad57d8cc2a0aa5f1ce479b3ccbc262e5a223c099a3e0495cec88d74c138d9366159393fd6dc75435d375b6d6680a14b66722d5bf22a5a5700a6c8be83ac39319f56e94a18884eca81d754505706e8deee460919b3a798d40b0f1ac12fb35fc8d6b837b50c5f4859a2f8662423dfb8ede255c9a2eff1f7d969e93afaf07f7785a97241a594681580c80ffb9b0c582cb6956e509a8657487582c775ce0cbeb2720e59db1ee30536be96c488b15b06ec1da40896d3cd4523cabebe18dd5d25613d9919ab2354d4721ee2eac78458fb51946d4b084a15fa5d3aa57716eafae22472ef559fd6f4e292c96f8c41c0a76e1f1685cde0eeec47a74b85fece588436fe064b95f91c16a83cffc98adbae23d520ce3da3fbe68aa55fc0499325d9a2670d1e0e72bedf0745ed6b0c4db07e54dce744c68a5897dc96f9a1f67a6ce0a194b299c8fc2b4e9463f72c280787eca3ed9c6464bfb2b242bcb83330c14851b319bdd799ff9d56cbcca028705e2e80e829add8e7aef9bf099a251d09e5b029be51f9880b6973c62bd172b5304ce819be9679ffafae5e4f2aec9f49ec1ad21818db5b9e42cb6d493901c125e3972428322183fbffba7c3ed50905533b3d20b3439212c4b082e71e0fba55e12b34e54860aca814f3413a897e6d9d3198d033da211a5f7cfc1c90e2680765137007e255f6ecb92b86735ce5499a0fbfd0d2f6ad3669e5f81c88a8c81e4a960baf7322e3cd345b6e370dc23226f6c0ebce48b23a0f234d64ae75b321b6ca5825af74346dbe69eb5ff5196cb424fa72505bd728ce2efa73ae3bcb43c85e3c6240dc0c5072352d381d043c9affb40db7b7967089dc87fcc2073ae14b7552ed1cd418aa74140ed94717fad5083794d0616ef0fdd3f0b9c2600f3120eecf9b7ede8255655216aa5f9c60fb39c466f08529735cf90b6cc6f8c7195599f876d778b62120406b2a1e00b94ae54ce420f1d1d6cd35fd5954dc92e95ab3366f889e1e63a84ca2614492fb7ff5197a25d6100793bfcb652ed6c7d831518735e6cef194ee6640946c40324dd0f4ea4db71739b07de7dbe5d5ef14a95fc88461923fea7f58a683588451b423878901b25b760d421455c9c8507f082f71f487e33116069914a3d696cc0a3660b8baafb6bfc81b1305fe9dc55f2f4fe3dbdab594ef6196b7c4e2dc9c97d03d4d10f87c1cba30100587e83eaa64d4a8333483b3fe58b1a8f00d8142711eadb2a8cbffaa35c8d61bb1ae65673d83167c1cf85589b8b5107d823b6ebca17c28cca00627998ab7ca669420d4fdc246f4ab5cf9655c268859cae8b2517be3e6ae59625c2cd8084b3b338fabe7e268911e43dcd8534a83a2b0ed92fa462c637e427e8a6ad009acf57923c33b6d8dfccc1f958c848b5e8c4050a882bdc40bb37fed6c40d7e1ffe58410bb91ddda8113ac841b8eec13bf5797f018fad6607fff0e081097a45e82019f6749f0d394e3a30cc7e4c6e7380ce8d06b3062233f89ae10b12bb0446536c16cb084a4caad730970ed2595ddb4d73f9928d080277505801b551b090e624e127a39fca67695b4d24ace3922462c8025e4d87aa710375c608e320904a26c9175307
fa768dcdcc9f4ac2ed1ff0c7a755e567ea173d75eda9caa4280687bee6ef66dd90bf0d29645e56faa223f4f42f2dd531607c8d5fce04d347b22e37afda2f7263c67819f48c104bf6dab1b69960e237d9d294f63c392567da0f88af3d22852ba37804f01eb44ff533704de999bf1f77c91f8014590d3e013767f32ec80c423f331d6d1f619bd171f45e533592d7af85b5b8972b2fd1bec3dcb85d5d9d15fa47889c2efc80c5dd854ea9af9f25894f2893e85ad991f3645e79ec364bc01b231bfe073c421eea5b884d7895d01cb9a1d905e45720b0a41e04aa33bf68ac7028e57d2d94e68025bdb6f2fff7ad193c4e15165b8544cf9ed3824fef2c1ac47dfdc9bf11a8efcdaffade096627af9294f"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000107a80)={0x0, 0x0, "35267191a67174233acbfa255c4331835e570391cfd2ae33ddb5adf83337cc5ead0d631c8eb113a74522ddaf57672dfbfba8ce2d7403e1bae0b27116771b14cbd6fa984ed7b5cb00156df9bcf1e07d8bdd6338b9bcbebe8cc02820d80b053226a9c1e22618d5435319d5bed2789848a74d7cbd19882271fca3c579a741603135c5348feb355a9466ad0a80a75d2823540254651cbc35f470dc1ad27405cafb058a6bad01c11a261c02657d2a48ef1a8ef382676330882da124a5f117603f01b01e2e4bef224d8c1dee6465322709491f997f70cdb0aee7026ae47fc7a3d9549464489b568be9309dfa0008c5d9f9b930b048cb543ece85880f5e0c47a6e02feb", "dbe2697fbce38424786c3e91fbca5ef7f1b81c77f53f90e60334bf9143fd9753a9076487817eabefcf3e60b8f14385870a2dc60d7435782be151c3f9a22b7a4f370951cdd6660e837122f1907221549c0c25359067ab57ee6238df4f73f8b76e298b5303a3fb6b4b5f284df80f79bfc8abfb15b6908795f7a24c763a3fcd89e33fb9d1e476bc11719511d76e55b3635ddf4ef9ecebe34d030b9b2f50760f39d5b41da879f4c4131fb00ea0bbf2811b63ae4751ed072f79e5b017c8b43e812459385a4a4644c1698b25718ebafa0c7ab7bda12069785bbae93a0bf969ed9722415f95bd0cc056547f79bafb6d461ea2ce48efab20768499c36c81dbaceaef90bc90762d733e0de6e76a48717b4c7b1bb982e6edab600687bf1a0234401f529d37455abb7752646f8f61d4c88373956350a8f50152bfa57c9db07c863f958c34a0160707c5ba54ccdaa6db7b13bd2e62920b43bbc463d5499e0d2bdd22c59992e5427e9a59ca32a57c4ba56962aecbfb493efc3a86e158a8637e97f4aca61ddddb575d0fbcfbb264444e0a7a7235e79cb658439b8bc4282e27e8b026a344504cb1359519f90159dd0af5ec18ba2ed948f29172dc343a1ce4ec2ab640c764b464417c838aa507dcee4b5d485f6f02ba3f3ee25bcb47ea0a3a11cc85a2c47f2f9b65ec1736861683db6147622e4e6b42fd3efe95892de8da28c98443b501f7b1cba38f6316c723db02c30b5c9c699f6c57f4ec18a3cf98e3fffe48a94a229a7ac70adcd0730a99c46bb9e7244c53d99ec26ae8f341bde237e437323a285de8bc30b9db91269e4dbfae4277aa35b6efdd5889dfe58d90d518601380860a66cf313e3e7474c377a8e7f9c20a4bbcd8ed2390849e5ef58f995a19a8b10a36ad60451ea551a383bb058d4878a2de4fe5ef2ca5e8081e6a83f273dd469612031d9eaf37d4bfa91071f2d3ac9f49da4099c498ec59d76a2581a558473f42d43b8cb2e648727de4767f30a567431478af5041237d0f3d91391a2c9cd49098d4829f3d8b9e7ff7b33f4103fc7ccf23a4f3f877e38c7bb4e504216a6486e7fd76b3a55ea417b25ca60e7df1fcfad9d825ffa88213157aeae1511df69cc0b2dbd311daeac3c68f4e375fe890016f246fd763452e143285acfc39eb6654aea6c5e18d39ebfce02200a500011436b65ef12ea86f7b8a8a79a986b29c23e6831bc4525620a68099e8ff0c58cb9ebc00cb5cf20157e1eaeac125611f237e2fa714c53e8bdc2b8a5bd6de5ad648f408e97b7d798eaa42c396c79b52f64361812bf9f5281ee57223ccbcef10e425d8be4d83211125c04b014243177b6d73463c71bacb27fd0633577e2ed772c4f0b34bd593e4ac37ea57ae30866eb76b58d8d809e6da0824556e1fffd56c4c4e35cd91e1239b1346b887d942f7c2923f046a3223647f4a483748c495c210cedf057a486e7bd41531e27fa696a82538da499ad5e8259018ec635de65591fe758658fae5e81efa3a4313952a01a8eb1a7e49f3ca133ba9129be374ca7a1e9de383506e42b6f509f42ff87be93adeb78ad2e9dc3d003aa2a72fa020472e1addbf4dd57825aa8f7761bcf58dfd758601a2dbf0af9dbb4974a6159bd2a9f5c691986cb9e8e9e98e381192657beef544ca2017b87fd8401cb6a6d67666737c7159c1f1430310c4a3a32ade0ec3cbadaba1
7ca0b93d39d1ce0a2b77f5f28a3fb7b8dbf3165b3fff7c058800ea2ac4dc1c8dcd015d4bcdf3879d1b59973075538129f27371e52051abc52aee1482130808f3d7cf40a8aad777359e30e468bead02a1d20331d816d26d3ec0e30ccf341d6c2d364f845ec287b0500fa1e3c9c718148d9cb80aee0218fc9d3f692e87dbf69223388fb3f4ce7681f27dda809e34a9f95b2a5b82456f9a36e09c0b13c601452f03a104eeb4f529b36cbc998d9d939b9785c608c3e5d616d860a150b2b336c219fc2edd447238d88563883ba6b9538dc4be608b6cf21ec12a20d8ec43b0042ea2c3cb84d20e9790ae70c54b7d615c10f5e9ca222cef3636324b5e155201f0ec25f80b5d1429872b88f6fa4a848f5db4acb8fa483e763e042ec10b8359dfdc4b6518d8ff71450253c4ba91652b75869abb776415f566354871002cc59c5bc63ceed41f36d4f3111946d3e31060384ed0f718d8fca8fff62ef8895b5f7ce5aa01c57d0e46496e4ad09d953cc716644d63f4dff12703cf34556c5d88faf991f94e87ccf9f491e3f7e9f305c295d84d86b163de716d5c44d5f1119439366159e02b6b8b9a2fc5b469d41a8028d353d75cc356fde2a67f362271fdc99d9f1834343cc0d0600fa299cdfca6f14d04ae9908c7c08e8286d7295a57d2c0a69bd1d0bbfb78de3caa7f3705596a3dbbbc600d2a22cb85461bf215c48c99a39fbec7669c3bc880bcb3fe69f071d9d97bfe369a300bc9e546e1d53ad316a2e9b2f0d78204e4b54f94ebf7db318879d5d6a75502d5a3ddfd8591e85b2f908bd64cf6e0196aca64db8e1d95cea652fe18d7a32b0fd83895afc5633ae4b7b4bd71b1f1d32ce5fa7174b9913ae1d1c53824015052062561b9a17024a1896dc6d6259526f2da416af1c1e04f72d0c3bbf388b71f2a6ee57b78793f6adab235a3ae1ef3d617ec558ea53baeb216071b4d27b0c41d9e1f07e3cc3e8fcc07dc86d613db6435a388f4c9f08c4c670441bed3c20d6b8069e04d48ca5d896bfda5b2ca99e65b7b8b89909aeaf91e367aa6a0db4a509a263a0579b456a239bfefeed13c3a0648ce83615359266c8934cfe83e77f9563c2d5b5a0233c08e888099d580c9d17a2a2eb16c5c4907f8c28b530e1258792b2558aa2a0fc8a216da00de159c5ba851c11a5859317766a882589f7febdd015122ca583d2960b32ffebb0ccb05d89e5c2e29a51cfcfe30a10dda8cc1103faab8bc368030e091b36cbdf70779e5ff8644a75a2f2e528b7a91a0e26aa4dc3181688ac09b7875fce155d7d6b5a81835bb204e537bf0abd34f97b0145debcb7c76912e540e66bc4da77d3756b10d3614d6821041034921cd60db85d83a51ad236c4fa27a9af1ebe9a8e2b34f3738972a304a076412622ed414c3511a4ff58ea5cf5c7c33c9123af4795c667b7a5097b16f8386fb801b31e540f1d1ff7358c3ebbc63da9ca6df25a9570913096af2593c4096f655628f97c304219d2a1967d127bb1f7ed8c7ba341dd78a7e9ab5ad66e5181fc7f629652687a7dfc05937e36f0799640d2128837cfcfcadc3746d640c8581a00fe931f478f1e06fe3fc24fd3ddeb2195b64c3d528c6e37782e3145fd7b2295ca894b8851496e04757b4d338015ed45decfd5d04d22f890023883d99356808ce5f6bd90642d9d870726ca55cfb174d92c37a625328418314fbeaf9159f89ed28830180f3612dc2600e723883d26f38d7d2624829aadb6ed1002761a718834901769555db5cdcab89322620fbf48ab3e76a32d2f7dea64e542e2ccd84983cc3c232941f1fa1df2f280e7d82969dbf6134a1241b88fefd23cfbdd9dbfa2fd6c720987d249ba81636a470184b28c51b4aab175f35afd861208d8c9bd0cb316b0e179a0a6258551f16f6c0f9405588b035b34602fca3197c2ae09eb0480920a90ee13391837bdca26986e2935dacdb05dbbdce6a443328397f68ef7cd2a5c70f58dd470dd1201e2fbea602d5f227b851de4eb552bfc74eb7acbb4735c97094a9b0967744578e7c39d70b59c307297736ba2a98b7a80366f509d62c7f38aa3599747bbc68965f5159e291ee87ab09544a41aa5e04326084149a7f5816eb1d8259b3e03202919ab4256d0eb36bcb3ed999ecd4c1110dcad83e32825fe7cf3930afab013ae5227ecb2d3efe688c1799000fcef21740fbdc6d485c677fc08ab8aa73417176a56e3d555fd9a8a7f72a891e9c266a9b326e0f8450f8eaf4a851e253f66652102ac26483ed2471a8db915e7101c78a4a641c9d4cce7b7c713a4ad806f833bcaec71d2e895a6dfce011090bf5dc0de87dd970a92534ab7c8f4c85b6e91c9cd11330fadbf62d5a1d9309e3a3cc822ed7b50fc7053df6b93d41b783580cd171a545deb8e54ff5966d147f1939772e0ebeb89fc7d38b78e69b7fc62713aef8d7ca14caac21f54765ecd3517d7589e7b3cfe0803ebf2534d0657403e783bc7da6ea3e168a47f6655cf1342c4fec7c57b7a0de8f0763c93d259097f
00400a7e1674b3f9b174fa4b0aea5ef0544ca507e426e7b9d384bbf4e4139e962a1b722ee3752f397345c0a2fa96bdab7435d27e34b13d3bece1818c330d9cf099f430c679d9cfff10ecaec30d92a1014a3456d06ddd084e222a00ad241539e755f2a9b34c4f91f6a83be7d7cf78afaf3d1e797682dd4e1ca501c2bac292bd5bb0c39e2bcfda565a27419bf5bd81c94db754bd234c8b33149dfd23c16bbe7b863d4150eda99c8e218671c2553ec7a36810c0b084284b8a84282d2617ad6341e5d8839e38610414f3b1ba475f4ffc82fdd73031989b6a53459fdd55a75af5568a5f64484e2c4fbdcb1a05ceee4dbd23a672aa0d59acc5b3dfeded8fea320a327fe9ce4962ced2ee4351e839ca1497e549f1a7c36620b0002e89e7e45ad87db2528af7a63c4faffd712fc89a4a2df30c0956bf6016498955626f8b871a4af0282c5d5c6b6dbc47d166458833e85d2e4449b4878dcf83c5fad0cbce6081fb23912e04dd4f5180718b5a761edcd48d0aadfb50e60396926db3a1a47f006dae4764a84ac7c5bd9d9a37a4f07a57b0555d02200442ad5082699d8d4b1f969b845ccc5e5bdc9413886d8def3ad63bf618863c24ac24516d665e42dffb6b4eba1d05db5805a13043cfe3380c6700b2ccf77b64627687d16f06e8e1ac0e540d821708f15b5935c1fb55bc9d62f34f402b592435f4c49c2c7b3b97a0aa4b7dd53a8211c89bdbf37714640113af5e32ece9511342341371412a4eaa6ceac08a9fc833dc8457bb3c8750e2c20cbaf0a93c5aa995a2ca29e39621694da7f15d122be74075434a8192774a84fd7ace16dee1dcd95e42ed1b2521d6e7ba20c1dc822c164166ad5499bf05944447bfbc55b6096b7c4a23e57aa9d52378783b6edf2213b0f82ba0c816d314e70bc1a726e775120b046be2e8de1d460a6edad6148681f69acde25dac143519273a4258ae69e9e2943e256e0682b2bd01e090bbb0093b73501b526d922745d7adacf844fb0bd3330e22bca518668f27b3111dffdd0e29cb855fa4d7253604b391495d4308fd9a49cb13e056a8b006691e7688c13b602710bbd9c4ba63361589d607b9c9372c0dd313f23589e96c9c879f6ffe854bdc745a9a4ca2f92ab7a1a9a22e63ae1e9517e9efb4c6da6b97fcc522bb36179c98465087500020022e7231b2612708c2072c0079a46"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000108a80)={0x0, 0x0, "bde2cbb5497e0b9c60d72ba065c40ae36b09efc6d252654547649a44a00e1e92e9caeed10e624c375f95b37796c31a40434f77776e95e70613416a27e9f5d0b3c884045398af24240312b9cca6a53c9023c0223a204aced2523146cca7bddb7df5362e3c2044d837beb31ced610006ab95dce883adc7a1b5099e96a1c40d1559764ffd03c4cb55d48b2e05765ea10e261d4eb77e365360ad6ea8e288435ce17e03eb2854419f17c04680c019c1d394215f956979772e0245a99b4926ac7e019dd8d9f8aace05772a7faec4c9f415205b1056fa29910a09b000fac72a2a9f0f8b612239169e81adf58a90aebd871a9bb85f4e49e5aed6d5a11c1543fdc56928b6", 
"97ba3502523e82b9ba8801a37294fe19e0c7ee69b16a0de0d59ce801d329e6bdb73be07e8510f66e2e0a4ba778bf8c68273325f627b67466d6360acb6f74eb689fec17e5beb3f4ae5945b68bb4e199375671062bc73120dadc8d0abca6656d5b1293648e4ace188da7635f827ebb4ce0cc8f57cb5bedaca53d287082c0b7164919d11d84bc287816e7c27553d3880431d1636ab80236d49625df0a803300df910911a9f88538544780b1873f385f6b16532560a8237b4713784ec288bbfb8209c7f452fd869459587c578faaccee5f41ea26ff1657284f91608149106dcd64b74663a5a0dde090d46bc6dbde14f5ec22a0a786ea3a0840921fc945b7e65f93b0a1820e5271358fd5212f058c805467c94410b134eba7fb4afe632dcbd2044079347bead4a493545e5e7398346e79e26589096794dffed97bd4377f4baa78e1738b435a3767b3d6e726ddd0510d2ad43457612c1a71943eae1bec83284896ad4f80d57da38c9e5cc040a098ee27b054332e64f8a9d5af685d2eb582f16b48a91eee2fdc8e2bf0a1243c0143b1c60f2f8eb8afb92497fd939fffdd92281c588c58ca81878fbb15f0a03dca1b9ba3f42319b253df7c4bb77f21ee3e8d04346e8da83698fd9767d698347579556e1d98e305b68f960807252bfb1d8a6ce8201bec6205b57bd4a1852dc5f001906f2dbb85e2ee99c3520e85c21817c8a859e1001e416440bf4631bac95954f35d67177cc80d28cf8454fb0d7eb6819f30a3655a4d11974988dbb39a6bc53e138488dc29ff6a0a3e849ad59e5a7e88fe82239691b3f8040bb75c1e6fac9603eed238abf9c1c6216118fc547cf7d4ad900587220da56bee8c647f59f34df9371d8ff5bd47b08e176e3f2f0e9a7e315d29a26b170e6d0b6f0ee27dbacc1e9667795dfab0f7b1d151132503459f57775a47ccb325b178cf7acf9478d78bacede5b568460ab68fe3cad0ad4ec0dbefcc2d4886c200115035ad17e22a4a634526854dc00bbf814f59388e645885137cd55de6fc152634e2fdd7d41352307f3b13bd92de4071254cbec5706e7e0a761c52a5fa81c36f0d4d8ef39792498b63cd43c99e07e0b84d167267c4e8e2c278ce41a8177c1962998590017acc0ae85566fb0d1049fc91c49b38379760df03022ccb478724f25541e145cdafde627b6572c64564dc971983b3549f8af5f2655e02cc6048a6b9973ea9634830b38a31e3521803b31c86dc89d7ea7c9fc05026553e4c5c64e962fafdc0e4090142e3c263f8dda5f55d11e4f973f715d6a3c27c9348f2cde6c50773926a3d14ab4b8db71c877e542a98daddfae5cf30a7ea060c163ac7d968deb1ddb70510b847743aed1c9c306dc536ad7355457e9aa5c0b8dab3c073dc073f2b54699f3e88141d8f492637b8d7a197c22555c29aad90f9aa50c41f3af04fbafa98fe7d16404dff2a93fa16591dc19625b7922700e1d314dd010a85bb8156a33c4f46f64d0a645c50b95aad44bafd8599b1469e3c1f9882d42f145083867af96a187e26a0eeb36b016531c81ccdd7f751aeda23cfdf9002ede83d7d8a8eedbebd9737c518945fd126ac2eb856bfbeb0bd35604cd132cc84af98b099d4b8345681e0299b6e33047a74ea921a7c60e1ecc68cfa67cb238e9c18821de181db2719f4a298f27802cc32c132282a4be1a01e700035598c9f2da89f0aaff0350e5ab1bae78365d45c952744fa5a0f71e5695e9bf88e461d7ef87683d46c4ef9590f314dbbe061b48d7d237a84f94927ec3902f023063bf152ae18632fa09d92ffa354bd739e253c6a4208d036fcb104f05896c4b128e147e7ed9cbf0683c744a1a383963707c8d417c4fe5f4146310b09416262bd9ea02edb6ebde26336300a2ab713acce9344761132eab3546b270e1664ba6b8f643ceb115c31767881836d164f9a665ffb69166771d98d50b580462995ce760aada92cdc7d41949f7b70606e3bd71257baf04e4cffd76d04feaba7e40231e8467c61d30593e37e76175e610384d342076e3f357a01f1177df5e327e6b7bf327756ec9203f7be982034f48a948859b6afa998a1bc53b8d9825dc8f4e82be08d44be6de2731de64e9938a66cd5cecc5899b9724f1fc1db39e6cd49f3efb72116472fd8768a303f9f0eccec170eb24176838e43509d9d3ccf7fb61ef2bd987d32b7596e55e4554815e670c48a6cce09747cb7389a8e5b2889dca2b3340ad751f7ec7c86637ad87cd81201b90d13496db6cc5ddc2842f9fd06099bee07abc1793901902ef1070adc3b7774a5175dd05f53690c7a7c391086c5708edaa12ead7452f987fb4be97b4fda97d670029dccd11dbc00fbb39039cdfbee4cbf2b5ef4d58859f6f70fe6cfd3078155823122de6c49666c1d5086719549af3f332b38fad6811d2a3d1fc57deed955fa7f7114a47456693c6e80f1d51e6bc7d88bb44844458e16fc7bcb6e400251891c3180293eadc1b7df2da1e6ee91d0e0b4b4064dc2945852018495de
bcd95e56c84f251b08abc3ae39a8da42bdf171c83818cdb72d2341abdc1ae37e453c477f70eb2c4c6e5265b51d2dcaeb0b99436ff6e2a9f4cb6a0202c2d4b69205defcf62b9c0c443b250dd84d3113e09e5201d25c29a647153a91bc074e2fd75a5c2ca6a72deeab007eecd726cd983410da133728789570f495117b4b54626ad5d08c4837d27c44b244ac5ac25c130fe7da9020c9e17a43ff2f8fbb5c2ed6214d1c30311e57e9e272a0a41c9d6c0841d1822872a8639896dc49fcaea3582eacb7e70dca33ac4a7ad361357d8640a5919f0a3f8ed641a807e21a7f0413684bfe3369bde73839faf01f569d49074ac8ca46ad075035c416825b27f33e19b23334ca6d08e83e15b88a20049bc79236ff97cfd3575339ef35dcda3a231a9acbabd1015611a6bdc2ac5611e4d3eb447ed636361ebfa44764d3572dfc972423ffa0e7d159337b1562b877c02304bc7512f4786e3ec5ca401f8bc365516c3db0c3bc43203e61cebe733858888cbd1f0058ebcc142adbe3c499ea8629323546457cb2ee7d25c184fbb3e5647a6811c8b3e6e2be925ead2ffc6cdd5396a93029e025d7eb8cc0ed92dad6f4c4b1b6fbec988571d01f5325cc202603ab5caba2344a8f73121e0cfa9c17c0c169f62fb309291f913f337c589ca5d4daf7ef265bbd79d12a20e07aa215fed8df75d800d780ffc6f984cb2e570aa66498eec6a8b093411630979e4d2e5cd267e6f4efa58e5f5b6938a4ca131c471b694eb360a221c605b1fc7d86a82952d3ab03d95031a45f881bbc80f06d1dea32710bf09b9ffacf670fec163988a8dbd5c78e1204eeb47651c4e58f78a239e7301cb07a0c197605410bf1b052cc948d19653a946f2800a6e58a067a35920699e68b2b667cc937993ae3d630085ba94bcef0e559d98ee853731068bd87c095b766a5a7b0cf7d6bd39d6a79e89d8f55d8cb08b01979a4817d5af405753e8e9249bb811d0afe4273033bc85dcc2c4bf9ccf90fd103294982c902e3c383ed31432215ad31b5fe07abfe5d8b8a660fc0c5906f21198d3b9b811e198ce112fb709b6bf7791cdfa468bc34aed6e847a0fc60766190460a8e6ac54dff4958011ac6bfd7bf6bfdc33eec5331b88916f08cd7dd50f465e42215961fa806aa439b87c636ff937cb2e6f4d036b7c8d419efeaae5456242c622a4638e902192fda33c440d8dda0ec4ae93af72dce2884f9b0fc818a78fde8a6d07fa7e0093c41a2c084a920ce7c4e4115ce92439a71915c08d04738c35c92e13c4a4fb57c18101a0d4fc800f0b0a248125670b20b5214f73917ba3bab1a1f1d67d4d339e0e691e3e726625996f757cc156be5e13141163826b49f5d50bdf4d9a596bb62745c269471f6072fbe65e396fa365f8fc845b4d240a9aeb63fd6fa12ab1e0fd05ee540491dc5bcefc1fc4c75b0366bc46243f0776dd3e8027542ec3dcd95a05454ba20a6079ac6c8819d643ba3b5309f10dd725c770d1462d79880d4f4f3f2f57c514ac9496de5748225cf5cd3497053987931cb23152adc35d873f18be6cc50357ec258e51e42b8fbbabc1a28d3f7cc2a1849c77880f29caa4587e13a8ec85788376129c3c319c8a5e24c9b0773a16de5bb38be2969adfbf37a8028924773f2cbb62e124c75ee326b3567837ddc451e1c8b05275395f8a4d6b83eb54d010ae704d038a370dc51a16bc7ca9a29814ad87aebd74135b4bbef73fb32eb3b3e6b8cc7bb47916db8ba3843dce95a566fce559c5732579dcda053fdae4a3c6f891eb203d8ae7259013e661de47dabe12889e36dc95e28e6ac1b95bbcaac65ee54b7e91f564965b5fc6632bef660f63b1009a38f1d18ecd3539492c1906f6865ec38f9a1f3a0f649a951377785c936d2fc9ef15308dbb0d9093375e3a43f5c986fca8e6c255a44c693b196d3aef45295f3a301ff0c6709f9e17eb91a8c00c6a8929844cb73714be13e40ca5c16424a2026e2f910124d52b69ba16abc0bc66eb09e9249748ffed446efa0ad8f6f2d671d637d2bbb9fcf1efcfac94456979791cab6c547384b6538eb936dd644c20328ef442ed80f89c9e9a0f167c99f546e9b19d0615415bb8e97c6dfd58a6bc23d48bace8106fe58175061b5f07e94d6be25a4a5244b6eac713fa765cd9103a0f75d92e03c2a0517d0e889143ce9803c02e580b5626ac9feac454f4906b18c1296c2d0d435e92a368e6f195459b6efb2d0058df7727dffb6b189e3db8252f8a5266a9ec47b003f56c5e19ff85f900d72987454e2ccfdd69452f87696f9f4712f19ef9203342cd25d9cc817af9626d9bfecc2e83548f8bb1fb36fa4c0a900567fc8a37cd831a8213ed0e858f6a01862dcea373d121c5f4a5adb97bf9b0f248f8c679ab721f22bc3a65a21ea70cbd6850361b84ee9b3b3a97df8b53c3fb3c5fe12b1bf65c183ef608213deb9de30181bcba33db7b79cb61071b38dadb9ba0cabebf0760eba3e470f4e7ab70173dbc905ecead08f2ec04dbc2c6b64c54fe
75bb4fb593c02e9c440d773d4c5d49f09a3494256445e9dc740fa3ebb40c4dab6a8b14ad62b66cb8edd56a040e1fa0b387aadbffed2003c60fda472c7fc27d48d9c93d80c26745c4d0ed5320e4f57832e800143e196f3bd777ed9edc81fe2dd5f7c758b1d7a9ddec539c634bd38a5912bb4be1e9507434aed53831bc352c47de1541d39b628d2f2d9d815d30c86c238526b90c5e72d90b5d2d39cc2341a91a92d189e2b13bc33dc036f68260bb4a1f210f2c8cee8ff0982b05f501eea21b59b079f857a55f6acf1eab2bf1187f296bbf8cd51ffd95a52fd054e63d1cdf41cfaa98c2e5e15c3639ab1acc85ec369284e237717715aa114a004b3d60d22c0a7a1206b39325308330c597bec8c4b68"}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(0xffffffffffffffff, 0xd000943e, &(0x7f0000109a80)={0x0, 0x0, "dfaf4f68fea4c9511572261348d4aaba190898142ad804a8c3b50f05a8198204390408432c74e67594a14779c6d6ce4b3385076c32c35979fe4bead70f91c44c036006698b3938d8fb149e684296ca238cfb764eb27c5e18cceb08720602ddd36e534fe441c4177d5e8b6b4dc93cca506139fd5117fe6fe358557aa5a370721331520cbc8dc0be9cc05a9a0f9e39026a4b550425c069bed9f46ba074ddfb6d4ee21b40d79adac895f2665c687e94b8b017d2ba84bba505ee6ffcb736eeff33a75f1eec2c732a018e8531f1fb4b7394d092181c8ab7011601b4891e403542df32898e366aaf37ff8fb58b6b7b43948ef922255cecc6a49cbe66e0e614a91370b3", "c55a444fb96f684ba467c9ac37040d1612bc298ce98aeab7dcb9c227d1e2514e7852bacc77ba4c848476ff479a00bb22cebe257c02b9bffe2d437a5a741a8cdfc0c1d2a3b53ed71a436000e2814f481b40a84c50b97c89aa00a54de92a0a63dfeb18bb3152340d80aca5649b2bf1afb9d23884ba39862cd28f7c34553abfbe3bbade9381dcd5ed0005bea35f9ed367d1409d23d210ecddcd1904108e810bd0d8a79376149889cffb8c2774e7d51eb0c89189fa520fad5c876020d2d52e1cd057db5a1fe9da903c5b88c95b9e5bb4bebbc225f4870f945c71e1e92dcfba07c91bd777841175baae43e14933635fe4b79361445fee5218d82d642d21de7d84b83cc115f888c34c1db6d86fea20e3af4180dc38810da65a072bf32d0ab0e396c83a8f138237315bae59287c27b4a587ad61fe166beaef62214a6bb40d47ed77241de3d0b1f6106f0c9d0c47c8f7882efd99dae66bb68eae19645c2db07dc1309eb119e4719e662ca39df3d44f080332a37809c31d63f408441328c52869d430f9f2e5461ff94fdbcda698664b8088c0c62a6e7e380c7f27bfc5109682ef63ffdad05c79d6fbce248b4510afcf331a7d821f8a24f28fe1849242028e054856286e940955a078b19b18aa519dea23376719216fd9c903412f0ae1fef93b6ed0adc17a4882e49047692f6c0f1bec981b100beedb41365d05e05a2221dcb782467b248f2d3c0cf5e0b87540acf9e6d1f5503e3df7671be7f7bf0454a4d7a29280019c6ef3073a0f92d84b037767c2da43fcb353f1912a83ecba7aee8e6b5b593e34be17113615fb9f7ace9c8e0042066a12d410e4876d705df055204720b87c8f5facc3f31f362426f7f9becc25d9e781cf47b16e42715b2850e37f57b5fc20ea55811bcac390941e408cc989270dc2699052c7b10ac321b30ed9fbea12c0c09a14a087ec4021ed4b574103251078043eac1a1a78a9c98f7757ce8beb5d99fa3d7d5c6d38465545c4ef043ee7672a303906e241cd8d0fd9fede6ea82686f224e2f5a3d4ff025ba5d82e300f9fa159da41387f01186b55f47d243648711fa96ea4ebe8b2825255cee7d5dce64996c48ca1f643b518288f1ca23d40d5607fcf14eec5a819971650f677331b8c1dff76fd473ebd5e94e78f49a4ef492deca4377de20c178bca76a29f3e9afd5b37c1cfbb3df6c8205b7124671478a833cab0dec08a224e9526dc1030bc7aa62e9031409b2bd1e089fc742cdc7f2e23edebd7d6bd7e4a09d6c4f07aa2b6d7a90cfb7e6aa1a898b77f3236ae7ff73f8bf77d976ffa3a8dae35986201d390c0ace643b30a49ef98ae7b6e57f7b09c64bc9e30d68e3a35dd5d33ffea472fbb1bf5c2603a669d2ce1f6663155a9494e08022d0410d0ff6033f9c887ec875fb4cc84060e72067be5a9201153380fdbcd97f441a62291bcd91b09dc7aee662319086a47f84052c3eb260b73d775e136527918de754ede1bb4108b4c0bb8a041b8bbe792734ba2067c07e717f36119922682151a86596e47ab08b906e17878d7b2d35b5848d93c528deefbf3cd01d4a87f98bc1f38ee4415366285f55d686605208ab9552cb082d311c96045fd42c407f0c1cf4b0632c4c09dc73fd3913684beb5ca319a54adb119ba329dfa67ef74665c30c5377857d7a8d567afc8b648324dfac5dc1af7c09dd90d
0e9e92bd05d25ec4e6e1f306fb2f99a89b5ba19aa47c1ea7952288c30abd10097c8fcb767045707f9e9a4c775c5b50f3aefdc02ac1e9c88adad18c5511d2398fb3fbabd34b0e3b6e8dc76f19cf32a19086300c5175265b320f9a19521f27d068f3368185447ede7be89355a5923a9041479443b92cbaa52557b70046a07f1821965158da1a919f1f26630e19657085fc1fdce71a4e07f1c15792b5bcdf33af802a09d271366df5fbe91ccc78f247e03e75a17d4622cfbb7d5b7f06b5fc7be60b79189d1eed840842615f1ad4fbc57099b9e2f85c4e383c3b0b4dec4b7c300bff5b26d3bb207f28aefef81e3e8b2b9326d1bd68710a8eb2005a8cfdf94712159e9d880b39dab046c6a3f8a6cad68128fc5ca4c46fcd358fd96b14712547975b6b89255815c781fb4cca830e5f9ac83c5a135354bcf013fc4729ab96cae1fd03e9aa2e2ff7d841e10815d24f23c40c0dc73de9930ed41e9efb1a730b22231821393e019f644925c2bfeeadcbf0e0595caec626356ccd7339fa70aaf4900b30677137aec598637508ae2884bc10169fdf4b1b5ed844617a466ba42c71801fe1552e1c302f1f2929d489b59838d84d9b0ccb890684c1d85a87fe6630150dcbb5320749e878f327592234953af55493172ff697c62356bf2e3cc429f4d3657702417880d1d55a75e15ebe5025d346b9c4dc13b961074c34f732fc095741f77bdeb88121c58a6945b20e4bbd79c4eb3d5c49782493e5739fe0d0d080f825574ca992f15ca2a10733d52550a8b61aa4e9083522aa5cb68e9a8636c67e65d7bc11b4826ef70ebb433156702c0c6123eda28ad36b9973e47729263ba7e430ca0fc19d1a1b65cfa698e132e384dd0e7a0652bdc1bd95df9c43e067cd57e27771d6adf163d93d929da6b23df3d894bc2250eb5c23166ef8d887822bffe37fb5d9cde2502e9247c63124529ce8d332134541b3c172dc5f2982a545855be424cfeb57ff1e0400010b8cc83fb7d56f92bcd6a8485bf77a9b8ca1a4adcd7f7aec958195d3ab39c647efa03de85e4d6228370d38d5fcfe615467ec246167a2b55106e2b37a6844071d8bd23eba2af31b19d6d0333e5c1c2e239f0bad38002f6acbc32ec0662368c390c1b2d2f8a82232309acd201cbd424cd455feef3f6f10319e940197d4f40790487e18d60dc33435be9ca463e7a5da542c607fc5d1e357cab90a5a6a00b82871f3a4acbf2770ecdfa08bfaa090ea9976c02a3f74ce700b4e994e33c19ead284ae2edf563c4dbb498dbf1d60b9fea4b762c914e8ebaae851d842b86034eef45cd449eefb0c8643315b6cfd99ddaef176c7bdad40643f19802473d356959d4fab0ddff8dcbcb07444991efbfccb15ccccb273777593c78b1451a864ce1d8afcc2e2861250419b67ca8fea8a59735b3f75424203401b82ec9f18f9ab82d1a2b2b1a918487164d36f1584e8ade304f8633ce0da14cab984128594c2fe49be9706466169b9233075b34c5a4c018c2d8961aff89b3187200346d4c26030044225d35c31daaaf74d09e2e615772dbba5cb108c0f150cb4ccd40c03e8d00931f41a027fb19b7c9c477df2f15447df598c749304c05d8184ca0f8416dd7c5e2474a23dcf61961317249f3c1b304b3f21dc9d3cfd3a7987b90e9fba232fdb3ade8bf21584385e7d480e42adb927ea718298e225ade40291519d7040b184581de9a3bf40b7c3c2273b53926767510c24487271b9290907a426d32985f27c002c66dbd15350a5dc4de29972f5146013cd7373dfbc63b2701e9a830199afc5dd87588e863453e1b5644879ea002fc04d5db6bee8c078b44142acc9ba91a152839e2de4e538635d99a6a8b636db1bb1522d72cf6e0eb00f3b83f400187d5205b2556ec6d125c61ef29f100d14e979584343cd64121be8a2a0acf11dc73537f6739255523a1b8254cc48f8576c05bda155d34d17c5ece4a9a256402c4a749fad8733e5cbc2f0cc00eb8ab10f5c22b7207a5e6fe048d6fb14b185170e25a246048064ac711d1641851c06e89531d3061cea216948d9a7684a304aca6643cf5949cd81218e1b6934d82ede93d89c4e3aba340226f85d46bcba67b97412645740fc34384e3344f66747d86dd0749e6313a18970e6558088b619bac5298d81c4cd5718840252782c80bd916b2b10c632e51cb36765078ffd0c119ef5c877836a2493a3a3e3540e93de58ad1dff5d4e0f3ac05e805933a1b1ead6c51f4a60415b2cb416203afdcc30f146118bd59f77d2fd4c900bd8b9f64668181232ba92e9b3f272f3d6157016c3b500fb1201c48cdcca109466974dc576fc5c7987027c69f1aa809f9f81d42e1c4e5e52d586c5ef96deebaf13516f3d62f9573262aac45630f517608e9085c3a508d5bfb8c220f10bb7bd224c1a678199256691c0c3cc9145cbd6162c25834b495903b6dc82486935139129270d961110399269b398936ca31bb7085876d9261cb842dd8eee134e7f979c3b6614db704230e0df
916c702ec192feec680cd87b04c1048702c37c2b3afca7d8ab2cf4781600934ef2f89b6177c74d2a9d1b144847c0ce48ca596adefabb20c7fc26550d279ce5fcd3f128e3307b4aecb56c3a86f257c724b7b8bd6565bb408189a41aaa2ff36154ac1ee7291798dac3e89a5efe5a381d8c5aac949309744fc0aa84fe409f88064397ebbc662e3f5dbeb24b8c8628c48603124d395785a30cae026effe2ee1b79d256d59f4331ac81411dda3294f371ebf6427fcfbddf89486b3de2b291bc47b972a0c6995629023b4f879e447303fde1bf8a283fd25ee711b95343eb603ff30b5a06f8dccd08d2aad50ef2c096ed11251d2ac2f4c613c396056848cfa0212179c7f49d03b5947417f0e7e721f1b1d75ce7c836ae5334de7d1055b0d67fd35f015f5169074b7484ba38fa39385dbcfe3f61e2cb94d99c31fb4f4195686b05023887bcdcbdb22ff6ff69062f7b5b747f6543cd589f9bf6e204bbc31aecc34d1c128c4e353a8232bcd3d493ec1c373add84dc7fccd51a1e244c4228f02a5bfa7bc71354d2300eb7ad27f3a3fe93821ee8251442e529bb90ee8dde17f5ed3e710f99a54d828e848449c92ce9002b30e4243d05b7e64d62308e3c9d29c7d088fd1b942ea0e55973a0428d1e70adf98656cd32db7475437620499691e6d553cb7f606778b20692218cdf76a14b3771f7be8e55e0e0dcd863abe481be367e203390345397c7ff1925cb3b3414a5fe1c64b8f6e1229a158e0f7d41a6641b5f78cb2728692dab94d7e41f0af49935d90c2fd8e715dde1a9c23c5f15ade136905a58975f57e30a763847e5533a43b61acb851347dfb765d02b3d49cc22657de1afcd8c554ef6b04d87512ee5ce6cb8ab85f1845193155fca50757eea958ef188642237ce2c87d50b2e5ce16b5ee95d4d38622cc8c4816cbfb13ab9e865141a090bcb1f04d509157bc86a36dbdb433a428cef964824415e0ae7e44fd4db52d0f3aa5b245578d9149f1f650b1b42253788c63ab11a97fecb457862f7f00337937eee890e6d10dab730db71a57e0858ed635b8c90057389b5fb5e17c4998d89103b2a50132860acb670101a103d359f98790323a57c2aa60787ace8bf039661f7589fc8ac627981f9237bb701a8913e026fd28cb918d7f345eb6726cb20c447dd21493c22e4312cc6be76cb1f09faacd407273779cc6c82a5f"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f000010aa80)={0x0, ""/256, 0x0, 0x0}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(0xffffffffffffffff, 0xd000943d, &(0x7f000010ac80)={0x41c, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}], 0x7, "af871e63be11c1"}) (async, rerun: 64) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f000010cc80)={0x0, ""/256, 0x0, 0x0}) (async, rerun: 64) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f000010ce80)={0x0, ""/256, 0x0, 0x0}) (async, rerun: 64) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f000010d080)={0x0, ""/256, 0x0, 0x0}) (rerun: 64) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(0xffffffffffffffff, 0xd000943d, &(0x7f000010d280)={0x3, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, 
{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r0}, {0x0, r1}, {r2}, {r3, r4}, {r5, r6}, {r7}, {r8, r9}, {r10, r11}, {r12, r13}, {r14}, {r15, r16}, {r17, r18}, {r19, r20}, {0x0, r21}, {r22}, {0x0, r23}, {0x0, r24}, {0x0, r25}], 0x8, "8cf69e6f99b7bb"}) (async) r26 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r26, 0x0, &(0x7f0000001440)) 00:20:21 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x60540000, &(0x7f0000001440)) 00:20:21 executing program 2: r0 = openat$zero(0xffffffffffffff9c, &(0x7f0000000080), 0x3, 0x0) r1 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) setsockopt$inet_mtu(r1, 0x0, 0xa, 0x0, 0x0) ioctl$UDMABUF_CREATE(r0, 0x40187542, &(0x7f00000001c0)={r1, 0x1, 0x1000000000000, 0x1000000000000}) r2 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x3b88, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x3b88, &(0x7f0000000000)={0xc}) r3 = syz_open_dev$dri(&(0x7f0000000040), 0x6, 0x280) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r3, 0xc04064a0, &(0x7f0000000180)={&(0x7f0000000080), &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0], &(0x7f0000000140)=[0x0, 0x0], 0x0, 0xa, 0x3, 0x2}) ioctl$UDMABUF_CREATE(r0, 0x40187542, &(0x7f0000000200)={r0, 0x1, 0xfffffffff0000000, 0xfffff000}) 00:20:21 executing program 1: r0 = openat$zero(0xffffffffffffff9c, &(0x7f0000000040), 0x300, 0x0) ioctl$TIOCGPTPEER(r0, 0x5441, 0x800) syz_open_dev$dri(&(0x7f0000000000), 0xffffffffffffffff, 0x8080) r1 = syz_open_dev$usbfs(&(0x7f0000000580), 0x400, 0x40) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f00000005c0)) r2 = syz_genetlink_get_family_id$ethtool(&(0x7f00000000c0), 0xffffffffffffffff) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r0, 0x89f0, &(0x7f0000000180)={'syztnl0\x00', &(0x7f0000000100)={'syztnl0\x00', 0x0, 0x4, 0x4, 0x20, 0x4, 0x2, @private2, @remote, 0x8000, 0x770, 0x2, 0x40}}) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(r0, 0x89f2, &(0x7f0000000240)={'syztnl2\x00', &(0x7f00000001c0)={'ip6tnl0\x00', 0x0, 0x29, 0x3, 0x2, 0x7, 0x3, @empty, @ipv4={'\x00', '\xff\xff', @remote}, 0x10, 0x7800, 0x20, 0x6}}) getsockopt$PNPIPE_IFINDEX(r0, 0x113, 0x2, &(0x7f0000000280)=0x0, &(0x7f00000002c0)=0x4) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(0xffffffffffffffff, 0x89f0, &(0x7f0000000380)={'syztnl1\x00', &(0x7f0000000300)={'syztnl2\x00', 0x0, 0x29, 0x7, 0x29, 0x0, 0x0, @initdev={0xfe, 0x88, '\x00', 0x0, 0x0}, @dev={0xfe, 0x80, '\x00', 0x18}, 0x700, 0x1, 0x6, 0x7}}) sendmsg$ETHTOOL_MSG_EEE_GET(r0, &(0x7f0000000540)={&(0x7f0000000080)={0x10, 0x0, 0x0, 0x4}, 0xc, &(0x7f0000000500)={&(0x7f00000003c0)={0x130, r2, 0x1, 0x70bd2a, 0x25dfdbfc, {}, [@HEADER={0x44, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_INDEX={0x8}, @ETHTOOL_A_HEADER_DEV_INDEX={0x8, 0x1, r3}, @ETHTOOL_A_HEADER_FLAGS={0x8}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'bridge_slave_1\x00'}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'ipvlan0\x00'}]}, @HEADER={0xc, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_INDEX={0x8, 0x1, r4}]}, @HEADER={0x80, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'pimreg1\x00'}, @ETHTOOL_A_HEADER_FLAGS={0x8, 0x3, 0x2}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'ipvlan1\x00'}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'vlan0\x00'}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'gretap0\x00'}, @ETHTOOL_A_HEADER_FLAGS={0x8, 0x3, 0x2}, @ETHTOOL_A_HEADER_DEV_INDEX={0x8, 0x1, r5}, 
@ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'bond0\x00'}]}, @HEADER={0x18, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'vlan1\x00'}]}, @HEADER={0x28, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_INDEX={0x8, 0x1, r6}, @ETHTOOL_A_HEADER_FLAGS={0x8, 0x3, 0x2}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'veth1_to_team\x00'}]}, @HEADER={0xc, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_FLAGS={0x8, 0x3, 0x1}]}]}, 0x130}, 0x1, 0x0, 0x0, 0x8000}, 0x8000) [ 1221.888212][ T3198] netdevsim netdevsim0 netdevsim1: renamed from eth1 00:20:21 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0xff0f, 0x0) [ 1221.943997][ T3198] netdevsim netdevsim0 netdevsim2: renamed from eth2 00:20:21 executing program 2: r0 = openat$zero(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) sendmsg$ETHTOOL_MSG_EEE_GET(r0, 0x0, 0x0) r1 = socket$nl_generic(0x10, 0x3, 0x10) syz_genetlink_get_family_id$nl80211(&(0x7f0000000040), r1) bind$bt_rfcomm(r0, &(0x7f0000000000)={0x1f, @fixed={'\xaa\xaa\xaa\xaa\xaa', 0x12}, 0x40}, 0xa) r2 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f0000001440)) 00:20:21 executing program 4: r0 = openat$zero(0xffffffffffffff9c, &(0x7f0000000080), 0x3, 0x0) (async) r1 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) setsockopt$inet_mtu(r1, 0x0, 0xa, 0x0, 0x0) (async) ioctl$UDMABUF_CREATE(r0, 0x40187542, &(0x7f00000001c0)={r1, 0x1, 0x1000000000000, 0x1000000000000}) r2 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x3b88, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r2, 0x3b88, &(0x7f0000000000)={0xc}) (async) r3 = syz_open_dev$dri(&(0x7f0000000040), 0x6, 0x280) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r3, 0xc04064a0, &(0x7f0000000180)={&(0x7f0000000080), &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0], &(0x7f0000000140)=[0x0, 0x0], 0x0, 0xa, 0x3, 0x2}) ioctl$UDMABUF_CREATE(r0, 0x40187542, &(0x7f0000000200)={r0, 0x1, 0xfffffffff0000000, 0xfffff000}) 00:20:22 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x66631840, &(0x7f0000001440)) 00:20:22 executing program 3: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x25c1c0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000140)={&(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0xa, 0xc0000, 0x0, 0xffffffffffffffff}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000180)={r1}) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000040)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f00000000c0)={r3, 0x1, 0xffffffffffffffff, 0x1000, 0x80000}) ioctl$BTRFS_IOC_SET_RECEIVED_SUBVOL(r2, 0xc0c89425, &(0x7f00000001c0)={"7d568c3b5e99d02cca126aa6650ba9b5", 0x0, 0x0, {0x98, 0x5}, {0x7fffffffffffffff, 0x6}, 0x7, [0x40, 0xfff, 0x8001, 0x101, 0x4, 0x200, 0x6, 0x5, 0x1000, 0x1ff, 0x6, 0xa, 0xfff, 0x4, 0x0, 0x44]}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f00000002c0)={0x0, ""/256, 0x0, 0x0, 0x0, 0x0, ""/16, ""/16, ""/16, 0x0, 0x0}) ioctl$BTRFS_IOC_SET_RECEIVED_SUBVOL(r2, 0xc0c89425, &(0x7f00000004c0)={"1efef7e12379052541bbe655f0219e7c", r4, r5, {0x6, 0xffffff81}, {0x0, 0x8}, 0x0, [0x0, 0x1, 0x5, 0xce7, 0x7, 0x7, 0x3f, 0xfffffffffffff644, 0x1, 0xffff, 0xfff, 0x5, 0xffff, 0x0, 0x101, 0x3]}) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f00000005c0)) 00:20:22 executing program 4: r0 = 
openat$zero(0xffffffffffffff9c, &(0x7f0000000040), 0x300, 0x0) ioctl$TIOCGPTPEER(r0, 0x5441, 0x800) syz_open_dev$dri(&(0x7f0000000000), 0xffffffffffffffff, 0x8080) r1 = syz_open_dev$usbfs(&(0x7f0000000580), 0x400, 0x40) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f00000005c0)) r2 = syz_genetlink_get_family_id$ethtool(&(0x7f00000000c0), 0xffffffffffffffff) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r0, 0x89f0, &(0x7f0000000180)={'syztnl0\x00', &(0x7f0000000100)={'syztnl0\x00', 0x0, 0x4, 0x4, 0x20, 0x4, 0x2, @private2, @remote, 0x8000, 0x770, 0x2, 0x40}}) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(r0, 0x89f2, &(0x7f0000000240)={'syztnl2\x00', &(0x7f00000001c0)={'ip6tnl0\x00', 0x0, 0x29, 0x3, 0x2, 0x7, 0x3, @empty, @ipv4={'\x00', '\xff\xff', @remote}, 0x10, 0x7800, 0x20, 0x6}}) getsockopt$PNPIPE_IFINDEX(r0, 0x113, 0x2, &(0x7f0000000280)=0x0, &(0x7f00000002c0)=0x4) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(0xffffffffffffffff, 0x89f0, &(0x7f0000000380)={'syztnl1\x00', &(0x7f0000000300)={'syztnl2\x00', 0x0, 0x29, 0x7, 0x29, 0x0, 0x0, @initdev={0xfe, 0x88, '\x00', 0x0, 0x0}, @dev={0xfe, 0x80, '\x00', 0x18}, 0x700, 0x1, 0x6, 0x7}}) sendmsg$ETHTOOL_MSG_EEE_GET(r0, &(0x7f0000000540)={&(0x7f0000000080)={0x10, 0x0, 0x0, 0x4}, 0xc, &(0x7f0000000500)={&(0x7f00000003c0)={0x130, r2, 0x1, 0x70bd2a, 0x25dfdbfc, {}, [@HEADER={0x44, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_INDEX={0x8}, @ETHTOOL_A_HEADER_DEV_INDEX={0x8, 0x1, r3}, @ETHTOOL_A_HEADER_FLAGS={0x8}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'bridge_slave_1\x00'}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'ipvlan0\x00'}]}, @HEADER={0xc, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_INDEX={0x8, 0x1, r4}]}, @HEADER={0x80, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'pimreg1\x00'}, @ETHTOOL_A_HEADER_FLAGS={0x8, 0x3, 0x2}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'ipvlan1\x00'}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'vlan0\x00'}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'gretap0\x00'}, @ETHTOOL_A_HEADER_FLAGS={0x8, 0x3, 0x2}, @ETHTOOL_A_HEADER_DEV_INDEX={0x8, 0x1, r5}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'bond0\x00'}]}, @HEADER={0x18, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'vlan1\x00'}]}, @HEADER={0x28, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_INDEX={0x8, 0x1, r6}, @ETHTOOL_A_HEADER_FLAGS={0x8, 0x3, 0x2}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'veth1_to_team\x00'}]}, @HEADER={0xc, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_FLAGS={0x8, 0x3, 0x1}]}]}, 0x130}, 0x1, 0x0, 0x0, 0x8000}, 0x8000) openat$zero(0xffffffffffffff9c, &(0x7f0000000040), 0x300, 0x0) (async) ioctl$TIOCGPTPEER(r0, 0x5441, 0x800) (async) syz_open_dev$dri(&(0x7f0000000000), 0xffffffffffffffff, 0x8080) (async) syz_open_dev$usbfs(&(0x7f0000000580), 0x400, 0x40) (async) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f00000005c0)) (async) syz_genetlink_get_family_id$ethtool(&(0x7f00000000c0), 0xffffffffffffffff) (async) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(r0, 0x89f0, &(0x7f0000000180)={'syztnl0\x00', &(0x7f0000000100)={'syztnl0\x00', 0x0, 0x4, 0x4, 0x20, 0x4, 0x2, @private2, @remote, 0x8000, 0x770, 0x2, 0x40}}) (async) ioctl$sock_ipv6_tunnel_SIOCDELTUNNEL(r0, 0x89f2, &(0x7f0000000240)={'syztnl2\x00', &(0x7f00000001c0)={'ip6tnl0\x00', 0x0, 0x29, 0x3, 0x2, 0x7, 0x3, @empty, @ipv4={'\x00', '\xff\xff', @remote}, 0x10, 0x7800, 0x20, 0x6}}) (async) getsockopt$PNPIPE_IFINDEX(r0, 0x113, 0x2, &(0x7f0000000280), &(0x7f00000002c0)=0x4) (async) ioctl$sock_ipv6_tunnel_SIOCGETTUNNEL(0xffffffffffffffff, 0x89f0, &(0x7f0000000380)={'syztnl1\x00', &(0x7f0000000300)={'syztnl2\x00', 0x0, 0x29, 0x7, 0x29, 0x0, 0x0, 
@initdev={0xfe, 0x88, '\x00', 0x0, 0x0}, @dev={0xfe, 0x80, '\x00', 0x18}, 0x700, 0x1, 0x6, 0x7}}) (async) sendmsg$ETHTOOL_MSG_EEE_GET(r0, &(0x7f0000000540)={&(0x7f0000000080)={0x10, 0x0, 0x0, 0x4}, 0xc, &(0x7f0000000500)={&(0x7f00000003c0)={0x130, r2, 0x1, 0x70bd2a, 0x25dfdbfc, {}, [@HEADER={0x44, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_INDEX={0x8}, @ETHTOOL_A_HEADER_DEV_INDEX={0x8, 0x1, r3}, @ETHTOOL_A_HEADER_FLAGS={0x8}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'bridge_slave_1\x00'}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'ipvlan0\x00'}]}, @HEADER={0xc, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_INDEX={0x8, 0x1, r4}]}, @HEADER={0x80, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'pimreg1\x00'}, @ETHTOOL_A_HEADER_FLAGS={0x8, 0x3, 0x2}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'ipvlan1\x00'}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'vlan0\x00'}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'gretap0\x00'}, @ETHTOOL_A_HEADER_FLAGS={0x8, 0x3, 0x2}, @ETHTOOL_A_HEADER_DEV_INDEX={0x8, 0x1, r5}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'bond0\x00'}]}, @HEADER={0x18, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'vlan1\x00'}]}, @HEADER={0x28, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_DEV_INDEX={0x8, 0x1, r6}, @ETHTOOL_A_HEADER_FLAGS={0x8, 0x3, 0x2}, @ETHTOOL_A_HEADER_DEV_NAME={0x14, 0x2, 'veth1_to_team\x00'}]}, @HEADER={0xc, 0x1, 0x0, 0x1, [@ETHTOOL_A_HEADER_FLAGS={0x8, 0x3, 0x1}]}]}, 0x130}, 0x1, 0x0, 0x0, 0x8000}, 0x8000) (async) 00:20:22 executing program 2: r0 = ioctl$TIOCGPTPEER(0xffffffffffffffff, 0x5441, 0x1) ioctl$TIOCSRS485(r0, 0x542f, &(0x7f0000000040)={0x1c, 0x20000005, 0x7ff}) ioctl$TIOCSCTTY(r0, 0x540e, 0x8) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f00000000c0), 0x460a03, 0x0) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000080)) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, 0x0) ioctl$TIOCGPTPEER(r0, 0x5441, 0xfffffffffffff000) [ 1222.092039][ T3198] netdevsim netdevsim0 netdevsim3: renamed from eth3 00:20:22 executing program 3: r0 = openat$zero(0xffffffffffffff9c, &(0x7f0000000000), 0x0, 0x0) sendmsg$ETHTOOL_MSG_EEE_GET(r0, 0x0, 0x0) (async) r1 = socket$nl_generic(0x10, 0x3, 0x10) syz_genetlink_get_family_id$nl80211(&(0x7f0000000040), r1) (async) bind$bt_rfcomm(r0, &(0x7f0000000000)={0x1f, @fixed={'\xaa\xaa\xaa\xaa\xaa', 0x12}, 0x40}, 0xa) (async) r2 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f0000001440)) 00:20:22 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0x1000000, 0x0) 00:20:22 executing program 2: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) r1 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000100)={0x0, &(0x7f0000000180)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000000080)={&(0x7f0000000040)=[r2, 0x0], 0x2}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f00000000c0)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0], 0x7, r2, 0xcccccccc}) 00:20:22 executing program 1: r0 = ioctl$TIOCGPTPEER(0xffffffffffffffff, 0x5441, 0x1) ioctl$TIOCSRS485(r0, 0x542f, &(0x7f0000000040)={0x1c, 0x20000005, 0x7ff}) (async) ioctl$TIOCSCTTY(r0, 0x540e, 0x8) (async) r1 = openat$iommufd(0xffffffffffffff9c, &(0x7f00000000c0), 0x460a03, 0x0) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000080)) ioctl$IOMMU_VFIO_IOAS$GET(r1, 0x3b88, 0x0) (async) ioctl$TIOCGPTPEER(r0, 0x5441, 0xfffffffffffff000) 00:20:22 executing program 2: r0 = openat$nci(0xffffffffffffff9c, 
&(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x775804c0, &(0x7f0000001440)) [ 1222.179712][ T1942] hsr_slave_0: left promiscuous mode 00:20:22 executing program 1: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) (async) r1 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000100)={0x0, &(0x7f0000000180)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000000080)={&(0x7f0000000040)=[r2, 0x0], 0x2}) (async) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f00000000c0)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0, 0x0, 0x0], 0x7, r2, 0xcccccccc}) 00:20:22 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(0xffffffffffffffff, 0x3ba0, &(0x7f0000000000)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff, 0x1}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, &(0x7f00000000c0)={0x48, 0x8, r1, 0x0, 0x1, 0xa3375, 0x32, &(0x7f0000000080)="fdf590fa4bd4474f02186520512b19339b0ab0bd6274adae7a3d238b0895a7ac4818bedbdb119b747f9670de95b37da0abd3", 0x4}) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000140)) r2 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000200), 0x52258bec11d88d16, 0x0) r3 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r3, 0xc04064a0, &(0x7f0000000780)={&(0x7f0000000580)=[0x0], &(0x7f00000005c0)=[0x0], &(0x7f0000000600)=[0x0], &(0x7f0000000680)=[0x0, 0x0, 0x0], 0x1, 0x1, 0x1, 0x3}) r4 = syz_open_dev$midi(&(0x7f0000000340), 0xfffffffffffffff9, 0x4001) ioctl$SNDRV_RAWMIDI_IOCTL_PVERSION(r4, 0x80045700, &(0x7f00000003c0)) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r3, 0xc04064a0, &(0x7f0000000300)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r2, 0xc04064aa, &(0x7f0000000540)={&(0x7f0000000400)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000440)=[{}, {}, {}, {}, {}], 0x0, 0x0, '\x00', 0x5, 0x5}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r3, 0xc05064a7, &(0x7f00000006c0)={&(0x7f0000000500), 0x0, &(0x7f0000000640)=[0x0], &(0x7f0000000680), 0x0, 0x1, 0x0, 0x0, r5}) ioctl$DRM_IOCTL_MODE_SETPROPERTY(r3, 0xc01064ab, &(0x7f0000000740)={0x0, r6, r5}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(0xffffffffffffffff, 0xc01064b5, &(0x7f0000000280)={&(0x7f0000000240)=[0x0, 0x0], 0x2}) ioctl$DRM_IOCTL_MODE_OBJ_SETPROPERTY(r2, 0xc01864ba, &(0x7f00000002c0)={0xd55f, r6, r7, 0xb0b0b0b0}) r8 = socket$inet6_sctp(0xa, 0x1, 0x84) getsockopt$inet_sctp6_SCTP_RECVRCVINFO(r8, 0x84, 0x20, &(0x7f0000000180), &(0x7f00000001c0)=0x4) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:22 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0x2000000, 0x0) [ 1222.256009][ T1942] hsr_slave_1: left promiscuous mode 00:20:22 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$udambuf(0xffffffffffffff9c, &(0x7f0000000040), 0x2) ioctl$UDMABUF_CREATE_LIST(r1, 0x40087543, &(0x7f0000000200)=ANY=[@ANYBLOB='\x00\x00\x00\x00\t']) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:22 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x785804c0, &(0x7f0000001440)) 00:20:22 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) (async) ioctl$IOMMU_TEST_OP_CREATE_ACCESS(0xffffffffffffffff, 0x3ba0, &(0x7f0000000000)={0x48, 0x5, 0x0, 0x0, 0xffffffffffffffff, 0x1}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r0, 0x3ba0, 
&(0x7f00000000c0)={0x48, 0x8, r1, 0x0, 0x1, 0xa3375, 0x32, &(0x7f0000000080)="fdf590fa4bd4474f02186520512b19339b0ab0bd6274adae7a3d238b0895a7ac4818bedbdb119b747f9670de95b37da0abd3", 0x4}) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000140)) r2 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000200), 0x52258bec11d88d16, 0x0) (async) r3 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r3, 0xc04064a0, &(0x7f0000000780)={&(0x7f0000000580)=[0x0], &(0x7f00000005c0)=[0x0], &(0x7f0000000600)=[0x0], &(0x7f0000000680)=[0x0, 0x0, 0x0], 0x1, 0x1, 0x1, 0x3}) (async) r4 = syz_open_dev$midi(&(0x7f0000000340), 0xfffffffffffffff9, 0x4001) ioctl$SNDRV_RAWMIDI_IOCTL_PVERSION(r4, 0x80045700, &(0x7f00000003c0)) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r3, 0xc04064a0, &(0x7f0000000300)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r2, 0xc04064aa, &(0x7f0000000540)={&(0x7f0000000400)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000440)=[{}, {}, {}, {}, {}], 0x0, 0x0, '\x00', 0x5, 0x5}) (async) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r3, 0xc05064a7, &(0x7f00000006c0)={&(0x7f0000000500), 0x0, &(0x7f0000000640)=[0x0], &(0x7f0000000680), 0x0, 0x1, 0x0, 0x0, r5}) ioctl$DRM_IOCTL_MODE_SETPROPERTY(r3, 0xc01064ab, &(0x7f0000000740)={0x0, r6, r5}) (async) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(0xffffffffffffffff, 0xc01064b5, &(0x7f0000000280)={&(0x7f0000000240)=[0x0, 0x0], 0x2}) ioctl$DRM_IOCTL_MODE_OBJ_SETPROPERTY(r2, 0xc01864ba, &(0x7f00000002c0)={0xd55f, r6, r7, 0xb0b0b0b0}) r8 = socket$inet6_sctp(0xa, 0x1, 0x84) getsockopt$inet_sctp6_SCTP_RECVRCVINFO(r8, 0x84, 0x20, &(0x7f0000000180), &(0x7f00000001c0)=0x4) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) [ 1222.334069][ T1942] batman_adv: batadv0: Interface deactivated: batadv_slave_0 [ 1222.336417][ T1942] batman_adv: batadv0: Removing interface: batadv_slave_0 [ 1222.341612][ T1942] batman_adv: batadv0: Interface deactivated: batadv_slave_1 [ 1222.344345][ T1942] batman_adv: batadv0: Removing interface: batadv_slave_1 [ 1222.351095][ T1942] bridge_slave_1: left allmulticast mode [ 1222.352861][ T1942] bridge_slave_1: left promiscuous mode [ 1222.367336][ T1942] bridge0: port 2(bridge_slave_1) entered disabled state 00:20:22 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0x3000000, 0x0) 00:20:22 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0xfffffffefffffffe, 0x0) 00:20:22 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$udambuf(0xffffffffffffff9c, &(0x7f0000000040), 0x2) ioctl$UDMABUF_CREATE_LIST(r1, 0x40087543, &(0x7f0000000200)=ANY=[@ANYBLOB='\x00\x00\x00\x00\t']) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) openat$udambuf(0xffffffffffffff9c, &(0x7f0000000040), 0x2) (async) ioctl$UDMABUF_CREATE_LIST(r1, 0x40087543, &(0x7f0000000200)=ANY=[@ANYBLOB='\x00\x00\x00\x00\t']) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) [ 1222.406598][ T1942] bridge_slave_0: left allmulticast mode [ 1222.408143][ T1942] bridge_slave_0: left promiscuous mode [ 1222.409669][ T1942] bridge0: port 1(bridge_slave_0) entered disabled state 00:20:22 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x80086301, &(0x7f0000001440)) 00:20:22 executing program 2: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x624000, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:22 
executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0xfffffffefffffffe, 0x0) 00:20:22 executing program 2: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x25c1c0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000140)={&(0x7f0000000100)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0xa, 0xc0000, 0x0, 0xffffffffffffffff}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000180)={r1}) (async) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000040)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f00000000c0)={r3, 0x1, 0xffffffffffffffff, 0x1000, 0x80000}) ioctl$BTRFS_IOC_SET_RECEIVED_SUBVOL(r2, 0xc0c89425, &(0x7f00000001c0)={"7d568c3b5e99d02cca126aa6650ba9b5", 0x0, 0x0, {0x98, 0x5}, {0x7fffffffffffffff, 0x6}, 0x7, [0x40, 0xfff, 0x8001, 0x101, 0x4, 0x200, 0x6, 0x5, 0x1000, 0x1ff, 0x6, 0xa, 0xfff, 0x4, 0x0, 0x44]}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f00000002c0)={0x0, ""/256, 0x0, 0x0, 0x0, 0x0, ""/16, ""/16, ""/16, 0x0, 0x0}) ioctl$BTRFS_IOC_SET_RECEIVED_SUBVOL(r2, 0xc0c89425, &(0x7f00000004c0)={"1efef7e12379052541bbe655f0219e7c", r4, r5, {0x6, 0xffffff81}, {0x0, 0x8}, 0x0, [0x0, 0x1, 0x5, 0xce7, 0x7, 0x7, 0x3f, 0xfffffffffffff644, 0x1, 0xffff, 0xfff, 0x5, 0xffff, 0x0, 0x101, 0x3]}) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f00000005c0)) [ 1222.536676][ T1942] veth1_macvtap: left promiscuous mode [ 1222.544647][ T1942] veth0_macvtap: left promiscuous mode [ 1222.546237][ T1942] veth1_vlan: left promiscuous mode [ 1222.547659][ T1942] veth0_vlan: left promiscuous mode 00:20:22 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x624000, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:22 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0x4000000, 0x0) 00:20:22 executing program 3: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f00000000c0), 0x0, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000140)) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0], 0x1, 0xf235955baba59ed3}) ioctl$IOMMU_VFIO_IOAS$GET(0xffffffffffffffff, 0x3b88, &(0x7f0000000100)={0xc}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r1, 0xc00464c9, &(0x7f0000001240)) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r1, 0x40182103, &(0x7f0000000080)={0x0, 0x3, r0, 0x401}) mmap$usbfs(&(0x7f0000ffa000/0x3000)=nil, 0x3000, 0x0, 0x10, r0, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:20:22 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x80086601, &(0x7f0000001440)) 00:20:22 executing program 1: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc}) 00:20:22 executing program 2: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0], 0x3, 0x1, 0x9, 0x3}) 00:20:22 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0x5000000, 0x0) 00:20:22 executing program 1: r0 = 
syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000140)={&(0x7f0000000040)=[0x0, 0x0, 0x0], &(0x7f0000000080)=[0x0], &(0x7f00000000c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000100)=[0x0, 0x0, 0x0], 0x3, 0x1, 0x9, 0x3}) 00:20:22 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000000)={0xc}) 00:20:22 executing program 2: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f00000000c0), 0x0, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000140)) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0], 0x1, 0xf235955baba59ed3}) ioctl$IOMMU_VFIO_IOAS$GET(0xffffffffffffffff, 0x3b88, &(0x7f0000000100)={0xc}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r1, 0xc00464c9, &(0x7f0000001240)) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r1, 0x40182103, &(0x7f0000000080)={0x0, 0x3, r0, 0x401}) mmap$usbfs(&(0x7f0000ffa000/0x3000)=nil, 0x3000, 0x0, 0x10, r0, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:20:22 executing program 3: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f00000000c0), 0x0, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000140)) (async) r1 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r1, 0xc01864c6, &(0x7f0000001200)={&(0x7f00000011c0)=[0x0], 0x1, 0xf235955baba59ed3}) (async) ioctl$IOMMU_VFIO_IOAS$GET(0xffffffffffffffff, 0x3b88, &(0x7f0000000100)={0xc}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r1, 0xc00464c9, &(0x7f0000001240)) (async) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r1, 0x40182103, &(0x7f0000000080)={0x0, 0x3, r0, 0x401}) (async) mmap$usbfs(&(0x7f0000ffa000/0x3000)=nil, 0x3000, 0x0, 0x10, r0, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:20:22 executing program 4: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000000)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000080)={r1, 0x3, r0, 0x8, 0x80000}) ioctl$SECCOMP_IOCTL_NOTIF_SEND(r0, 0xc0182101, &(0x7f00000000c0)={r1, 0xd8ea, 0x3}) 00:20:22 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x801c581f, &(0x7f0000001440)) 00:20:22 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:22 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0x7, 0xc02) 00:20:22 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0x6000000, 0x0) 00:20:22 executing program 1: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000000)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000080)={r1, 0x3, r0, 0x8, 0x80000}) ioctl$SECCOMP_IOCTL_NOTIF_SEND(r0, 0xc0182101, &(0x7f00000000c0)={r1, 0xd8ea, 0x3}) openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) (async) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r0, 0xc0502100, &(0x7f0000000000)) (async) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 
0x40182103, &(0x7f0000000080)={r1, 0x3, r0, 0x8, 0x80000}) (async) ioctl$SECCOMP_IOCTL_NOTIF_SEND(r0, 0xc0182101, &(0x7f00000000c0)={r1, 0xd8ea, 0x3}) (async) 00:20:22 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc0045878, &(0x7f0000001440)) 00:20:23 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) 00:20:23 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) (async) 00:20:23 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc0045878, &(0x7f0000001440)) 00:20:23 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0x7000000, 0x0) 00:20:23 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0x7, 0xc02) 00:20:23 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x240, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:23 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000040)) 00:20:23 executing program 1: syz_open_dev$dri(&(0x7f0000000000), 0x8000000, 0x0) 00:20:23 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc0189436, &(0x7f0000001440)) 00:20:23 executing program 2: syz_open_dev$dri(&(0x7f0000000040), 0x1, 0x100) 00:20:23 executing program 4: r0 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000d40), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000100)={&(0x7f00000000c0)=[0x0, 0x0, 0x0], 0x3, 0x80800, 0x0, 0xffffffffffffffff}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r2, 0x40182103, &(0x7f00000001c0)={0x0, 0x2, r0, 0x7ed, 0x80000}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000180)={r1}) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r2, 0xc0502100, &(0x7f0000000000)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r0, 0x40182103, &(0x7f0000000080)={r3, 0x0, r0, 0x6, 0x80000}) 00:20:23 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:20:23 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000108c40)={0x0, ""/256, 0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000108e40)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000109040)={0x0, 0x0, 
"f5fd58cb9d2746e76481993acc06042a1ff4ddaf1f1493f41e6a2a91246abaeb0da4ba1ac598ae5967e0cea69257486302d15716526ec75a5168e1ef8a56a4f2eeabc3c6618ab9fbc00f8632153d22305c224affb2a78497c17ee389dff0e2687a635571997803e6424e6b9af8380036308cde7b2acd94110ee4d64e322ba336aa77d3108232f21dbd2e55adb821e2aaad6ebec1afd0ca2e03ee5b3bff99b732f98a49ac88d6f27cec73070cbad3dba7e9dc50270c17063ea24bb3fe9fd644cb425d5034b2c47fa14a22934690331d96357730f400bab48d7cdf137508829ad62dab15c569d327992e16c661d8e42ff72e56656d4ddddd54d66c62e3d4b20428", "d6287028aad77bf103766764d658cdc8e7d6b2022eb1c23eba4b4783c8a963c2f00311d996d50dbf403153f05ae2626d2f6b6b029ece5a999c8bc985e0603bd2ece86de1b4b37841dc53990ee9f008d6fb9a92c5ddcd2190da43987727a5fe0f33ba9615545eda8241bf22b5592ebd0db88d46e6f79c588723088524b3a66740f2ec51c11309b1b4295ef3bef371e3febd15e4b344a07f69faabccf1c08b22e55a460ff3f9c651e76ad5d9b4bcc6a70f5b592a9efc6fdce68e364992cec743017d3be70b3cb79e4ab00a0ef3e202080746eb10a460f9cdf7a02cec6cbc63e0daaedd9cc84495f346e1ae90f6b19f904f20609578dbdcf884eca33ce9a4a1161366c4f0f0b8bf768d070a8c9eb90e18a0023fb64d89a90799ba1db9f5ecdf113794ccc94bcf439b2932fbc6b42098b687ccd54a6d638a3629da986ec004d9c37c5a6c5676637dbc368806210d1348a98c843e689d0be7acefa3e67cc604e649d47423e07a2a5f665b5de147c2961b82ad2f474d8b34be886ca7ab790dc2fee9d7d3513b9f4ed86bff1a7a51e82685fd7e9360b3ab137ceaa13e8002e5ac9e34aa6047cfe0745327b27f37cdfc4de2d5229801cba6b478a9dac75bb8412f201c18a8d35d11e14bdb77dfc7367fc35ba5be2b4bcff919218eed7cdcae459d5ad6198978dd2d628f4f7bafc10e961a2a22a2897fa8cbcb32f12f457b66c3ef3be93dd8f75211644fe4053fc07fe4d280f13e1f7eca2e930372742dde4f58e0165f4c53c6be5897ab3c4e2aed3b4f2a630f05384e8157c7ae561031b5959de1cbad7cf09ffce39b79509802bec765ef5b3e31ca3849ab4cd0dfa00a3a0b4969349c80b5f567127487d8b0660b4a531fd2af30aa26260e2bec9d21a7a464106cfadd73b14021e461417ec99efb9cd4ef256b18433e54d96ea872088be30031496258ea660041675f8bbec7f4d88febd6b5753044f20dacad95e401f517aff4a24cf209dd2e171f058ffaeb99e9101170ed2dd5a5eeba55ae39d3fe8064ebea262a9b97aa7d0de302cad5b84b6f9aa1b68158932f6d2c22cee2f849267bb2cf213c107ce99972a1911c1a5b051f30228a964a449603964f814036723b88524dab15826f79078906e6324d227f3cd1325be91da637646b1e5165a2dc7a1659f8b30b3dbee2d417c5ecbbf438bfc2ab473c721bcedc8617d8c094fe3d3c3fa86b06056ea5f5ba29016ac25ee7aa97d9c45881d59fbf6c42f1bc7c2192ce1f013732300944fd7471f552c716d01c6e5885a92281aa48fcae846e986b58610cbba0adb2a2c6859ab4b7562adefa0eacf1147af2efdc28ce9e4d4e0c7b61ae719f94d21f8ae954256d370b13f142e33dda461025fa21502bba4bdd8dfba19713f6d30f59c50127e18e082d1939ed79d14d127023a82ef75e292060ed6c14394d544740eb2bb8dbfe4da075f5947781f47800b2f75fa87db44027193bf7e2b7bd616043a6aff218f314779a5cb81539710815a950d4aebb4801a227741fcafe2d816f8c3102a71f1592ceaea1e0529e89ac7423d5adc73d8a9fe2f8ddea3ab326859b8226aff3c62e977c28a5993dfdbb237becf8438e23c0e9bc05dd378bc5faf0b9dbb8ce28667dc4da9df9ca4b800a6f2c9b423641937ecb39a4a9d84785b20c3a51579f724945e04afebf306d172971aafbece46cfece45018736cae93519f65b481773a4d28fe112d427bde16cf8c5be66825034107a349824a93499e7474d4640ee98ac62710f1c8d5b57999d33e7e6f87b7208407bbafcb09f21c4d079029674c80d0e2f39c588ab1eb561edc8392bfdb51b70abe8abfa365af78a8f30e91a4da4003cd795d345b6142690cbc7ced14d707c55e1d248c2b16d5b3879245c2e3f45e87f2bc82c046dfd2d6b3999cf38f0e5c532144b3760b0a6ea07ce5158ceb133519661b02c31ff1239ef5ae4490f31cd6b45f65d7d19bc5e17d4c65009cf906e67bd96acc6d0ee5200b6c7962c4ba2f95e068dd19c153f3fd54561d9022337d8011815aa44b7f498e1970930f948ac6c267bf8611ea0defdfd6dacc09478b06a92da2727d0b0c3e61d75810ede4edc1db6321014809e8ff5b98b016e2ff8ef6fc74da98170865f3a7a51083fcdf769c198
e1a810f67eb4f9b4b9a658d395999a41f7a3f43311278f9a3070c18d7d8963a536ddcd861495268063d28c5c0c4a879f70951820b70e289cdac76976f93f48c5506891e1b1d3b2b6bda3c29ade2a1272a3366fdb76dcea5ad409c736cc52e874e0dd54e88cf2d91166b98ee00f231b9da03061ecac56451e6cfb47520ac610dbcf4aa2d7456df6f5f0adbb448cb6e1bf83390ad4187674735146ce65b220651c2206974fee450e1cfee44905b87da5a07d1b1f9f4ab71f9274ce0e4e26f8e5a03fa3b58d3c95dfd395be8fcb7b6e5b5feda990dd4646773afa28d03b8171f4f8b59a10e4d58dfa71e26f71c7b906ab451cb0d5ee1e975a403995719aca3b25b05acc74bb142f3882ff0314791e4e0833e226885af53de567cce21703cf827d0740c6e9f5bb47f178cf49661cd594fc0877c6bfe05a3b5208ed5c974bc28f9bbbafeac9b1446143c2832215b0b7e7d8f427c277ad6e0d2b4c53f18ba8935a9c1bb21ef16c8b16fdaf2b4d0c5dab9460f6a319a3dd94aaa73bff2ef2b9ddc53f8c4989dc5c22074dcfad9ce461a0afc64db2651ee78e5a9486c1b87c8c91fd3236a15682cd8686d1aded75a2fca5f808e448eb329bbcec72795698b381f69e43d7b4c9528050f1a12761c527ddff5a4a7b02efd0e78c4fcecf2d79079efd48d9a1554a27a99aa45d4c004a329fccefd8b18e6261e1d3288db138799cbd6ab265b1d3ee4ceb2e5d6c9fa2139894d1a33c3ec605102cafad11d08b30fc5d268b75842c43442938f23b52148c7790b43914572e689799b9a16467fadd27b5f9f24b2e188c475d7050f5dd0a5b8c8f3a3256e864315b4f117edb94cac3ef0c1a7980b99c38b9f56268c2e4f2acc421422e9c94bea75102931ab02e20eddb89e60e3a96d471a0b772bb2a0b0d8b8bff046465af2162aa936a4ba328971713dea1e87517b54446d0bb4010522a83535ab7a36fd66d1131f8a971e0a5b6b4ac6e4f8d6ce8c29dd4bf8e96fed832e578c3a5113be40816b5ffd6960f0e22a81831a30f351ac6a4c616558244bf7ce9bd9455df085ddb601e3f1f0f18ae92cfb0e4c0310cfbb726b14f26e2458c95817f553a1052e2cdf28d97a24ef0e16db161bf8db6447be27c924dc18f81a68e1bfc78cb81ea622ebc218b6fb02d357ae4c8f801a832caf13e4d6c26b41247d76ba5e1bca53867dfee7b65f4ca912f737bf556ca60e2b0b5d9ed6f7f4901da97de3d0e5b23ac62df2592ae3347cf8ec60ab093a3bf35dc5ba89d0d4985937f78870ab01ee316b15c9351fbf3c4c41767a5d64e8e9f20149aea13eef5ae0d274aacc8cac579dc9cda2df2b5dde68c6343b62ce6b554bf21f45e7ebd0fa7200cf3c2125803af841002657a696935d1fd07259f5588e59e856de572d7062466bdaa070ab8e16a5de87fe58c5470e9090577f8754f918716aab45d2da5c5f0bc052434cc55ddc81655e7eac26e3b995b9c24480375f37de807ab1f72de88447552a6958e6019d732466ab2641f9d8f5eaf1f96b9f9c0a52340ac5566bb2fccfc2e80ad8494e8da59c12c52202195b4654bc17329109321560b562bd9a9c3616933430c10c6fa079fa9b6c5059c1d90198c0b943b6399d0a7d9a6244ecba16e99e31b3e9a64ccb8176345f24156aeaeed8d5d9f3c04a36a122f3da3f0c14be9a2ce72f3351180e4888546f53a256dd042d7eb3ab9e243463379d59323551d9fcb7e1007d76e1451e87dc9466012d7bfd540f9ce4cf8663d455881a21dec249440df889224a3b9ed95bfe71879dd23aa4cfdd5bee5aca05681132dc5b54795803b3489ca532df3005d83bbe556d616a69672bad1a31be349b1d8f2bba950053a718e819c0ba9872f62f11d611f4974985298fabfc632704ff2f9ea31c8b9a1e24fda98dd2a0fd9c903fea1cb9585ca487b8efd319244c5f4d610e7dea24603da40facc40c86420e60424fee4c37bf8b342d7e5a4cb9d7d3f8382264813c72c9e36a8a7070c07e58432d595c7afbe7bcea414c8e3369c0cff8209a2935e680ac0f6c4f90281f9a9dbd2f11db908e48cdd18164a684a007263953d5e3365ba6e6eabdab628491406f0a8129f6742579cec2e9188c09b7b2e4a2e43266654aa976b3f4dc8047f6bdc7ceea2ebd39e11b1e9aca8845fe0b243da0230aafb066e8b30d297a99d88ab267df35c40207a932e21e47eebb6b10881b22252e139a3657d36b38935cc29a6ca6759b43e6c241286d63778b320f2948e25b4cfd79039256dda56fc9f0577a89cce3872df778638a2abe8c6d30e2cbb96ef73b75bce705a76864ea0c2c9866f0ddc4abdfaaa4ddedf5e766443d0b6bbe209edb04370c24cbf6d05e96c7305e475b085ee348a67b0578a4e563e6101ccad08c4de2bfb82df8b77fc25c14c781e7665c0af5cc2b4f41c700bb8382312141b00e7281cf663cc46f6105b225a2e4508110c4b0364d0834a88fbb66ef18ce2e4adc94e37ed33e3c636cf4ad02324683445ad73366
7bd025985f99ced050822d5eeac2c46dc94e9fa54b71f9bba706097cb5f4925e421743ece08bdd16280ccf159f7c8ef64759832faa0f486c324c6596a30ec99424b7b255377ac5782c58fe310bfda575b5949a7c7b5bf0bd8c4ab8224e49ce8506e5cd0b9d82bba85b117fe1ee1653794bf1bb9dde07191273b1fbcf4722f9db1a586010cf9087166e469b8bfb43c79a4fa521851edf7014f793dea44fa0b1162ba37e4110600a833a5763367a9701ffd430992a665e6336870da96b10765aac0814c270605fa5653913d134d76a78efe09b7ce5c6bd8f9008732fb1f24e50ce0715b8088cdf500edf8ac848b943294162bfc228130a3bd7af9716617835f15e7acaeb26a4b972d06bf3067b74e4f0695cfc8c25049f39f6efeb2fea7dfc9e54fc121c6136a0187e6bae3c32c09bd7d8b9e90e3eaa9bfe61a7786bc9a6b963032bb1184608c29b4213f531087ae5652edece3e664a040b1d7eea10cd2b2abadf56d0b32536f3a1fec8b078584548e98efbffa8da35181c94330d795f17a2286b227ee3d9d5ccb27422fbdd1df475469401723bdb4962fd49a0cafd01173f2a89db680284fcaa018f3e408639f14d19c4a8827123a0ea6051fdb41446bda89e7a536e5b57a6c39f5602407c90a6863341a1bf8d376a6c8c4ddc39e1a5d0b7151dbdde81645c0afa9afbc693ce9d06856ce2c31c985713a6906d16d5d0a79c9bcfcc7828770d76f05be47a67a2bf6af5b"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f000010a040)={0x0, ""/256, 0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f000010a240)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f000010a440)={0x0, 0x0, "7398bdfbe4bd0ff04668df018d45baf9c38fb9fef88742de6760e9720bf5904d22d3e5ce7a73b963fdc8966023e4490f22308dba6a87fb4a5ae8dd91e28e4c08892702cbe196b5bb3c763561eeb19a2aefbf90ecca4e02e65e47de0fd579dc9f662d473cfc220aa759e1ea66c92bb637da5f26d1111da6c1efc395ecfe60307fe0fc3c390615daa01d05cd7e182ea8467b5ece9984154c2df63a033dc2fe22eda7ae37f4e2ca10051142cf86196e0f7c05d5077b99c600055a28ed570bd77698de578191a070dde5ee3e2ba9e66075e438830dcff4a91b725748a3a601839ee47e39d7cadbdc31a49b8fe5cd707a5462af4e89030c234d4b306a49825d92aae4", 
"e5fe8a3011c7ec93ebd8620b09f12ae70d02798dd7b296b2d3a0fb436cf76b6f82e4474864a57bcaebf9f1b073114fdca24b06124947e77255473d5bcfccbc9c23ea9c420dcfbb74064603cda01911d5581dcf8399a2a1177512c59f3b37c1713b9abfbb0d19b1b7192fc204e425eb82d6d4ebe10434fc326b2f0d5c8ecdccda4857c44594fe8d1f61144090ed51bf4491a3472cfe5ed19bfc53b33a58178dd1c14222cccd564604a455b5e32b5046c208912d5338184c51758b394a50c071af90623ffde5bbd495264c6bfb7e62e8c0e5c6ba8cd5b964e4f46dc32314fe968e7e21cf8704889d09524fb13f0820680a3f21aa09626d21001f37764cc2bfbac852c008eacfd2a042b9881bda515f8f69674197516837767ca0d07e8742a01991921fbd02790f819f35e9527f5c1fdd3a6b422d624f7b7deb5848b01c7368e1c8e9c49a14fc6226888a06ba3bb70452a43fa84eb1c4552366d0531f8d4138d70dba162dbfc90282a3a4baccbf63cbf0c06ac3dd9e236d0324c20ebf0f26b70ab9fc4a8e4fdfcdca5c5550791c7ebe5a8644cf530fa07ddc1f01f061491915538b26275803687493328aadeddd2a17a87a9c1044092a153456da1d325449599882bf2b9bd4ac1902b21aa1427024b0e642c1ae0900eda851c193867cebdc7b9f22c7faa05ba9db86e3fc22160a9f72cc3013946efd7d3e8bdacb51729ee6b6575dec0a31b01aa53da71a8e46f25198c7356119c00e44f8bff84e90feb4b8a9e1210ad71e7c40a6632063004662c0c5ade8853b3b305ac1b327a1b88736fe96f7a7164874d2fedf604a5bd29d12ccc1f1e5d40c11e54566aca9e51db09f9f2bbc38a3cad95f79b0e90f3f325f287e6b20e40f33a3047260b55543a0fd39b6ce3a73a5b20c41449e38ee1eb4be32207694ac05025dd896c73f1fa493ac49e93a5a7e70e01260de5bf1e36505bd01622aebaca9f6abf2a50a8b09454f4d2915ec89e4145ccb5222dccc4e859c21167934db6e57aa0c9f08a84c6ee228fdd03df18b2f9a86f39eff7ac8b2d5d4166a3c9cbf30c50e269eba7a778237af5d6f222c448e7235613b53c3432f2f80349eee2d7960ee195e69c866f7b33354d896b3c3485798a116a5ca198ee644b98c7d16a169c6e7c463f435a58a578e27245a6156542c0a55c23fb8f8ca067752f78f5766a037c8c151d7401fdf8082d11f5fd2878944a5ddf6e1b8e6a9653101f802fe2f9523793a1b4197ef2d89dfbebaa1c998fcb204cd361c14f300ea3f951f5a589309a5b6d3155bbcc18a9037ced1329b023e7e719f17593bbd4fc9255ecf644d3bdf9a26be92d6b601c2c1368493ff3a3937a5c2d6b675f428ad30c3120fb0f506adf4a8f1faefbf25018f5d44ab48ce84eed7b9e5faec6cd305a2be5b447b8e9877cfffed95941bdb99c9a2361e307f9027e6581e128f134dad0130258d8886cba7e09eb73e8c5187191d4d61d47ef786b0388c89ee0a83cb19125475c4fd4799050a05a1a1044e1bda226dc0a469f8b6c331c188200ccfbb888779a432c3cd2e8667c17953407e44d29558eaf1de46aec38d41fa726b7d7dc6fa0ec8c371f89abceb5d65ec3bda78def7f630b5b0abe0519ffb97b75be7fa10908a25217a67768182ee3ffd91ddc81ba1a2c7cc1ef04f3b20ed2aa6133a1d0ce563d4e19ef38de7138a83855cbfe86634e7d6fd6e8f09748264ff1e0b879f1101f495ad126a5a1e47d966db29d5a97ef1d1c14bc57f2f7953a6b4333f8ef8141f431b77ccbe6330f4da16be90b3acf2a9356ba52eb5e916e8b0114d74ef0dbfffcf6037815b6a3dd0dc0310b8999f56f2517d4ea37510afbb3c6a0a60681dd69eb96e784eb2d3f507e39829f6b3cf39fc3a514602eed2c99f90f940a17f16ceb0f7832be105b8e6893fe727da87473ebf37d71355f011bcf99c22b4a603c17401158d88df0bbf0fb1a6a0643bd27431fa4ee75e773441f1548c740bb654685e22b183adcc5215a738645f8cec0072519eb8ae8082d1bba38bd8b17fb689a8cf93f499562384918704e13f02e6321a9d8eb68334b4257dd18df181643d43694d9735dfd8fc9e252dedbd8040b6fbb428f0bfaf3676aa670d22151083f01dd694bd87f11ba45988d53026bb10ef0118894bee144e12cefde1a293f16ff94d0276180705cff26a94833ca742e80476b1cef583625f72bb9310ffa7927805e047d01a05602e44a9220457ad3759c98569ad33e365b646d2a1aaaeeddd8496cab871f8587fcdd59b66a2c98d268d5c7d19e070e615b4ab722d9e3fa752773b2834496ee5c5609a2097f25bb1dbae062e1cdfc163ea0b8066bbf79b3b94aae706ec86856f13e5a6113a4bf6f62e785ad22f8bcbe355eca682154668c1846c5ca889f9cb42ea724db0582364052bf1fdfa5c3d277de5789dd2471ec9fe54aa7f63f915ac567af42d4ee1692ff3ddbd3fb5216044c8eb9083ddefc4298574db28b00b45b216e02a9ded2beafb62
12a161b838773684530f0cf495a8c35bcccf4b0d0efdec20cfb2419fb6106b25194e2fe1b4375def05735ffcde17e83cc64e6fa387b3c38a3175caccd9dae96683bb4eaf5e38a1db5026ad732375aa0ce077e8c29a89f04f5a3175aa039585c5ab6e3842bd61a763fdad82beb14e52dbda9b7c693c18b0d11e970f16affe7a42b80a11da7f279aeb3368d5aad0532fc6bb3b162404bb992fdc39be535cc3db34648b5db73fe8c0d7b616d479dadadcef36cff871a0df824f0f37f15e699e8650e85ff3a3e74931a35640625f6c51c086b9d2f5be6f5d53b506c5835ce2f9c569638f7df494f36494b5f1296e3f36f5a4ff9411a8134411c776b7e298842829881161dc9a02a9fe619662fb5990b7c3ade1c92b0988cc6cd0db77939d7b9a8a025db30265ab9db4705484d7bf714301ed8a5691982f4b6d3dc802128429ccd4687d7c4a24cc787272c1a81f32d9fcf0f9e973d825431cbf92f6f5803549539c661cd078004be577059d6c00728ba6297734a5bc3ab8d569ac32ef171742eef595c3df112e38319c12319052a226aea50dbbbeeb58aa992cfb1a7712f7af2801672085c180d1379efee6230c5ffe0ab0b8b9fa561629374040a48bb13a1ba367037c934d115111c69803baa91efca846d8c7238828ec552ef703f160fa51c8039b34c000129d31c13dc0862b3f547d345a144a95697a1a6378795c0263b9362c241d59674133df2767b41c3af8dfede9aef46e0a6694ceece9d3e412537df4bcef9edec48746af4951487849dbbc4499dfaddd17fb7b57f42f6481d2b928a136dfaac90bb3f9741458cd4bdf024952514944a52d556a7f8e539c5f954b1dd55e039b4b2c119d79a013a203e916de0daf693567dd86fe380546b8df0cb683f7cb42c56487707216c56f2091ebf52e2a2abbe4338399e1fbbccd88fc5d2c7c0f1528c90a6d4a914c757d22c7eb366d5a34e2de08475b09c286f9305bcd0ff583d05c26f13d1377d19a3eed274a63886e7f429ef9fc892902e3e42f12d0581c77ebffe2d9fe07508741625fd89c304e808ebed74be404cd36619d777fda88022fb298e57101d5523bf1198e4ed5dd3d962663f315cb0720b62e369ea57aa396653dd20de811f1afc671432936437fa53e7a76c1949d33e59a655017ccd52bd481adfc6e2bd1acefdfeca9218def12ef1e453673bc711ff70b3c8a10b0e4642a72da63aa20402fff68e125b698b92502e08240ccc754b4dd094a7c8a35a164b0690f0c1c37cd8823b2a85aca909f7dfa85e5569857fdb7bce0548d71331615c10293a7382e585011546e7e76a72f37286d09ff9ca12fb16650df6c72a3fd358a09c050b35608934180c8246531ce58b51efb0501e22cf6e8e17dcd2d053e8d517e034221b02834bbc615147581b4f707c285a7bece31a9a17c835bda72d04b594fe7ae4399ccb53c6c3547c083dfd893b2e86b933b872b0d937b96a54088f081a499a3f640261d19e64e31f148c4774bdd3d05f69511fd46debd753489319448a8daaf36c1a63b64a7d725d7430b2aa908dbc14604954cc65257660623f1a9491b42628fdafda5fe1f73f0a74aa8c4511420cfd1b587d4feec51b409c8eb371abfcf211f904f9b64275b93f4d3115da94948bde9333348e017e20ddb9d49c71e31648f1fa40bf72bd03821d950355f67134bd74d6584c2cfa0a70ade0bf34c0c8770a44ac8b3cd2b0763816f5bb9db423973f6b791dd07cb47c7800372e752508161c16988529f0cae509e5f19fa6cb87b3ae876a4490a3078949d11d3ad31071ceb4eb12b18c3f516faa522c6a93da58d31c5b78bf4d578c36566a38d93d45c630e0e5dd04ffffca7d169bdefa3115f00e6389cf3ca501ffdcb7e44c2a0e1ef37dd20def49d618588401d4ed1ba7f15d667b2fb1ac67472c5454fa5b1cb0d595ba1c13348d9e887b86ce6b4db012e406f7f36775be21e5b2c56129622ee3f298ee72eb8e3a6379a6f2f7aeb4c489175953590b5bcc43435c978725fcaffe6de5745b7e9badf2228ccc1039acbaf38e66467d84b91c30302685b49d1beb45fd14277f6f88d95832ab0c65da29731f52d8fc83c6b9e7c91d67583eeb710c06a4552468f2d841a01b974a19210a2f0e2d735d7fa18663f0f7f603bc2d1752ebc6f4b237216cef45b5e9ef70a45400ad69e101b87f0e1dd7e13d4826c4920fb334766dde41cbf1e355640e7cb9772e649f3576810f189c637e17a75ff6efb7df590624308e9c5288516341141fff9b9a8a474de9bee109e3ede08e31b8f1e1c13cefc85ab4bc7d002e0a70d05d2848a5ec042c734fcbb73e67b034e12273ccc82d3da1275779e78fc04aa5e475a5b99391c6794ddc4c9e1ed1557e564041ce1903a50446c525cf728dc0def4fb66af4a8efb9198cbb7e06a6758033c7278c75f85429c6f7b34937c122765802947a820674f21248ac7f6193707c65cf268ecf4bb5fdb0e7cdba39e1b9320877c1c86d69b48d102fc26
1cdfa199b817d2f5a6a0f133bc5f79e245d6bdfb882141307d496d0ba2270d89b91c4fc06a9d515388c165e0fb4654fa3af08857756f1c803fb3e6dba5c9475fac7ea7c1dc0eb3d7ec437ca5418d1984428d3e63219d41869673571ca4bb9ac32cf58feaad12314ad2bc0c48fc290c607d39455d8ae06e1395cc2a100c23c363f9c48c5460db0936558e7c1cf5024044cbb708e4d6c0c01cb4e2b83b912af6941ee97f9d028a209fb3d8971f98872d2612b667555743335bf9bf667a1023f8718d06aee215c940181784e04907a4cd495668e5ee267e7a328bab668d3603e52956793ed7988ef2482b9ab955b20cb2c52b6fc039ff6501c6f1f44e0eb8d66a391ef6faa31a56841c1ae4cc1f944"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f000010b440)={0x6, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}, {}, {0x0, 0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x6, "82deff566589ae"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f000010c440)={0x0, 0x0, "1552bc412b815dd616f0cad39826e51e7649c57e6940c443f1046f627650f28541e1df6122e926c0bcebb5b16dd0750635a55033ef80741ef13d1511534d559da9b5e477033daf65836f2ea26f6f3b8e13865073693d72fec098dfbfb5c1b1eb842e7575f6d74fef8edd98081bf36667c83271849d914f954763b58a8916cdeb475b96a91fcdfda5fb89c6fc15dee20e9acde0eaeb27e86a4c037e9f07559b503e1fbc7533bb3767eeace20c25895c8d387c2b8db17fd322d037e7a50136608757c20a142e9cb46c0e578d98dbaa003094e9a5194dcf02232d306ec455b4ddaec6d5d7a2aa233b17587e3e6f823f32770f309a9640050cf6c4708e5cfd034ed0", 
"2fe5bc161c4b3d4882271128cb77decce384d9d09faea517f814376b3eba0456242087879be5158df0ec6fb5722785b1795ce48637e4b4b5647ff7b59d172117ebcac00ac9964aca702cbe6931ab95b5656b7726fafe6100da4b1a48700fd1088df5f69d0ea9da08f7ee74228ed1496e61c86ed4508f8b602f200cbe01d334d0705488509f3dd7246a459052a09f7c0baddc477296cb4256a746dc00c118a899f576acbd8ffdd4135fac695c0199313f02ad8f7438c47751fa49cb677ac979bf5be05d63b843c421bb9aa811ef744b02a2fea5b39abee37ab4fcfe10aeaa9a3089b6f70a5efda3a7f120c845fa5eb10ec10be3b3fb242338c1de920b787d16ffccffd4f88f2af7324d81230b0ca6ee6f8f18d9375ca660fa6dff188567c6ee725ef2f85680e44766cbf155d87a4678a00c5bf21056dcf60515aef850554d96c228f8a189417f0adc6be1dd46fae2bbd15c6e5d1fc692ea5da0410cca7f8467d7e0e7c84d3ceac93499013139bebad54f71cdb9398f12183c7f48e0f9636fb0ab904aef2f903c453dcd009613f3a62468c0f0c9a742dc53e878b79a2dfb5684f40fea8cacf606f9e4c516816b1347681b0233e8ab7e28dd94c8205e54e12f0ea736dafc7e02155c7f6deeaf322fad3bfd5ced30f3ab1bf84eabd26793584a0af4226ffc4aaf2385e08f92df0dab7b8a851cb09e6157627b43af46bc1884191b26417c434ba2a706305e4b3b2044f5a9a4b04bd3f7c8c7129c3484e2ab63ceb0039e2bcc4645b334f8d51800481a516b08c02df51b84b0530cbc7fce049f501d1b4b30960969d41b658754e16f6086e82c022d04974eb10d66da6f30ad94a67bebb61884796b03da6c85b169d8425d514b5bb9b767fce202c149d06c6ff4c210f96ff955551fa4a00928effd5321e9212a0774d0271345f2a9bdc5b5342a14bd0f3280fb923d8da7c3143621ccd8910be95f861086ab5706d430e05ff17ae54ff611f9d1f7545831d4823d43ec252a9d6f6fa4ae4c0e8e768ca07aacb006637dab52e53dcf31824063943b240733d556b8d169e19316f2792c266c93a026531580fbc96e2e5e2c75dbef18a3beb1b6591ed3d1f07b0f19f9017929b2b6eabdb404850eaf8b3a8ce8cd36dc95d120aed1520243df6604292de9f5d429aef2b582f5bfb73853192858452d85c5c963505edf262fe4922e8c0ddc441a605aec7685b42cd62594c245e45be8fe7a3c5482160a03904fe120cbfd486c9d37d19e30cfb03d5410384c4449cdb39fa9e6b56c631c54620aab3215aab6b8203f52a0b7ab4923a9fe12a61d215d8a9a0bb017e6e61a7d90a0c12427ea47042101a1514068ebd127d44d71758ca0d562191eab5034c1c4cb32600a6d94b01c6337a75bd6d62eba5fd33f8953cb438d378da0562acc3e9c1b8c80ac846ab81a2f68205eb7c966a25e16d7990fd73e43a7d7a7e88f6f17b4e474ec563cc03efc4d6d84eec05d90f8d050f3906c9966ef295ce16325ba11f2b5025ba732db94a09e70ed1c2aa3f3c2fe8e8e898a0abd7cd4e2d42a7d3a9ea65d1990b5946b078bea9d49e37e77a05c6a7a863a34bc6b277f3ba44f961b36ff00ceee5bd4e119dfcaa9de0b0486b436cfd6959455c4d3606ce6522309398fde327094e89f50054d81a34836d387af4deb6c73804e7b320a99909fd988e4ae7628414cc4b384fbcdba47a05793d19d3302090cc5ff9470f96ef943fea28bc3a5898fd02f7336d67142d10de1bb49a4bb19205205aa77bca019b658917c01dea28a86e7847bfc8809436cb8b41439febe98adca0d53487fef3cfb41b269a128edb3e5c22fa0e4081fa299528c3885811085dbc8458810427c3f3a93af8c30cd34a61c5e8106f839e19bda6245e0d10df6308434dde79670cfc5a6a4c373e2755da491a48103b630290f7508283ffd3667148c78bc2b6f5b202fa94fff3c9d4a714de589a034363694da352561039aa145af8b087b44a218f6759cc98cdc5788f71536dbbec23eb9a78a1bd907a5fff847168689bc40e57e8a124b069995b2aede317e9a008d2ac04c6af37d165e3d11849cbc0391a80f6df948f79c236d9ed42d83e42ad7c247000d30409774ce52c917b6c1dc147841ea751a28877a44d89e07c5c45368af612fdde06c57103dc62487a09d2c05a5d9872214f3839f9a2fb78e5f5ad2f2f79b4da0f1dd4d811a557250c1005637bc40ba6a38c044a2c1fa494c43e5e536d9392da5b3e7956a85baea2d08e5a6b14d4a86716f3c318c111f6d2882b030b8489fb19f38e5045b8cd985d3747c2bb45841f9d801b65d979757b21adc01e3b47c696ca88db397105dcd961ac14076f6399b2560f0bc994a1c148c57470a333b56d842b76663b8f430cf1a8785a4c065c87fbb8d8a5e45564244a55660a73b1d28487069285c2e8510350f1859b8b9cfbdf85240870f431332953d11528a17d873c2ef38a32602a1a921d5a4041878809c96a86cf38263a1ae5d30fc
874aa97c657e8f6c18635f5a02f4268a733945a1e4e16bc96d8046e0f5eb3c4dc8ec61e089a1a6416ab44b17703863ba986c1fd2866795bb9a34dcef0ddefbfce09230ca6e92b2fc3b45a100ec66b5626d96eb90ad96d9475c0b79126b635d75b3ef6a21aed553c9dd27f046c510de06d64cbd8400c78fed9b79986dfdb5c639b837abcb9960609b725343961d186737f6a5466bc4bfe6ace2216224402b013d371fc27dee737cc13a5301791c2a0abddb7be81ad1c7a7075d0b262cd7a62dbe182a25c31bf9a8ca029286845478acb9e3d863678d83699f12785c6b225ba65cfee358ed05f38d2d75ad7146aed4dbec9d5b055b7b8cb864d46db2dff83119eadbe0d73abf553e3288cd8a08a0a7a547821007ea61e409a2b71ffdf40db0d257090d39d2cca42ed6bdea97a51d0670d809cc7e0aeff79bee3906d8b8b54465e69ec343e21a8675c5fb31a9aa42d6f5b1bdbca7a80b5907cd25c3c0e6d98b6d7a27c0fe9af2dc74bab4b41cb6d890ebfd12dff410fa35f768bd61c68b757d9f3d60a93fb4204e6880cbe8fbb2c37006883a0f6e670c53ca995cb6f604906665ab1ddcdb1fa4f2659993788be7de2a580a22ded10fc875e4ebfd6bdf13998a528d6d0c3430ba8267e0ed6267b3055d79a13a44de7e0515d2a981602a1edb069388bac68ce0d1f92bf753aa88ba38b876679d75d991882b537ee90ef88d5b3a2b22f7d0975c34be431fc78d61e3ae13b89f8f9b459fba7ae2c891cd23cc4c46fe05eebe2e2f32d14d31fee5a40d2a1cc92b10c2ddf6ca1ee4b6e500fdca3a5a17c36220a4f1ccde2f3f143ba781d98ae86aff1643e078c2f369485fd3570872d27dfb67380d269ac52030851dc2daff15e0b46c61e27c5da6e23afa9411a215ddb3ea73bf7eb2a077621d087837c3d93738a97f7bc51cb50902b5a7d80890be6ce61b6ad9ddfabe5006dffeb7e26a46c9d241086d89b67d39c2a50d8bf64b3ca00cf98dff7fe51b3ec113eeb1a524779029da2a9035f84a70025985d06468cb9632d8e60f77de66b144fdc4f64737433d892d0131519fb7a50e18b5cec9e63af5ae8760f2c80504ec8ed1eb8814b866f8079edfeb6923dcc6a12aeeb93989dbd1d1c9b55f61996244e55b18517744f8ffe1de5225bcb29c7160aa5b51c768d179c80f68afe2ea584c5a635647ab9e1547b51e687c207d844303acc71aa0b3f375c44ad6510235b6f47b36d26774ffadba558e035199f12f3c7c3f2d7a2b73ad571eeeed3f2d86ab3363ccd19fb21812ede680697daf754e8eb068f5c02e7b29bfd5fbed61954b927a05345e521adf866f2aa2c212e536018b76c99fe4906dd8c1f19240a35f662a37ed76c1c363873639b2ebee9df9b4dc84fb23da7a2722c4b5f0d219deb18360ca3beb553893d46e54d041474d1bcba9ff0dfbf3c987ae9cef5a650820d64304503987a39dadf43f44188c5870ba3de9f0eaf9b0fe229e0af46513d30fa6eb8abcb40391a1bf96037d0c15a83364766c0cc64585362d7ed780baa76b32f6b6de93c40a33168980de39163de8fa370e3ed635695be4a2e6c9383e85b6c4b1dec1a30b910eaac9b36b92055f90760b7c177e86485a9eb4bafdb3a813bc32846b3f613b61e43ba4483dd967e0cbe4094c6b611863ca9bda0d30d55007255a498ea48da3ec0f779ab96493bfec3fcdda583efbc36407b65a8db2036b91dbd0397ffb21bcfbe761bf9083b8755ef3221fbc0c0c72e70bd3657964ea2425ab6c4f9a2b3ea1049b113d0059fc5e704edffb668d637fe3c35ec85141556f3b1b2547da1bafde0cb2d634cd786f3063d392d7547cc6f5461299e9e9944b3da68bd755d67bab03d8ab5623992f34cf8fa40907f05c8d1639d4f50a8a2d3655f2251f12cb28b00a8a3ccd6e70bc61aebbd9bcd1ad5bedab7ad9fcfcc3768cc07630b0ff3cfacca720f2709c09ded1d71d711fe78da463cefa4b4734a5853d5bbabef1106ec1fdd99e9ad2e3adbade91fb3879e520dd9f0c97b2aca475c105e8d08faece6298c5c56faa763aef55e5f4b88445d8ced1a83a6a06b02a8e2f28de953d4103904db93295cc7bb6084707926ee374c168bc7861e97612d46de7a1a7629d21a9caf06fd94487bdeb78674b6a8ea35ae29d29dba797c5ca8d67c02ff84cd2e42a8de19397de35de3d4fc16bf7324d005b496692797ef143440c8e6901700632f8028c6a3db70a76211f2346362c43858c315a77cedd52d278e74e1f07e86e558d68bb98786c7267ca79dffe789c3f41eabbcb36abfd54c47c0a6017c7b7dcafffd6fe95744e1886f03b078f1819e6eaede92ce5c6581f140ea7fd44e5ab2d170b724a58665f9b0ade3822915304ee23687c7b87435fd4c750bb2d7586fc2d2f2fc3b3a2aa3fad3c2c82aed843d0cac7bc0d38cd2f20534d5d0428936aaa7c35239a72dd3a4815d063962034886e0c49be1b0fd3028c348036b3bcf01a4fa168e000caf8da46251a5b12474f82734
6a76d142c09d3a4df61cb1b4db7538fe8ede8872bccc7b70e5f83f1fb7f1b371aac3c6988a4f79f95906bf0532cfdd2028680576b228e60dfb1d3c7c504c7f59326ef050ffe51488a70ef850a486ce298931fb770fc7b6aee90d3d1e3ad9056c769bc1acb3b7b3cca3bb90cce4e3993c1871b066aa9425ed48891f479907a9a49b4037c9df3dc87e8017085864535a4f03cc068952fbe71ae42ad3ddbcdd389b4350867ff60923b55a6950b6e7883782300ef902c85127f2d42380c1cdc3060b9ffdc8bb15f1c833ca9ffa30f6f4e7381ca04b7d9e0ee2190ef573cd500b1dbca64c3c8fd40d785904976e9d37cef47601cb78018e9719c5e182df0914deb53c88789bda0d68f8cf2c9901c172f"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f000010d440)={0x8, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x20, "6eb9dca8c6395d"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f000010e440)={0x1, [{}, {}, {0x0, 0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, r8}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}], 0x8, "f5644f809b88de"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f000010f440)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f000010f640)={0x0, ""/256, 0x0}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f000010f840)={0x0, 0x0, "9c393b1302bb70d37a43b75f36641186f5d96cd67f9a8ff67613feae89ec31790794bae1420ca5575f1ff8a1d359949ff78d4f96de66b602064dd14e36f243b5115ecbb2b8ce83ca4f9ee7fb196bb90b39c2932d85d599f5596606f97a078388ec0dfb0d37240b62e9d69699fccc032014df56bc82fa0cda22002b2cadfca180f0e0cfe44d6745eef9b2e05d117a305425007e515728742a0284dec7ffe05acf11a9e5e54a986a8d3e31ab55e93433e87808ac25b7134307c5fec74b296daa8d997b2e77294d26b0bd16a646b2aa5840ad2ddf92f1a37b28b4f4181a8a8d8a6325c3b0b00d8b404e00fa9e325ebc482b4a292ced89b5a5e69cba1034c260d5b1", 
"3aeeaffaf12b444b924c25680c8ea22e4eb90818f86944861933a4eedc5fd11c861fecf859cba343d241c3f03273b60079f6caa15bcfdc51edab5acc7df5198a75309d8a910b577e79a03748fdbbb843cd27d63ddd4e5948e891f797541b9be817e721a2b625aad4a27b48f63356630e41d8c3db92b2b446cf427a02a41899e4d9f17d22e1a3955c60c9564bb0a18b608650b1f04b2247d81feb8b5ac7810c990ebbb957e05631b204279e7d53d51af29b0eb7cf172d06fab61bf8eec0b10acf208e04f7aeb17e1caf27d065ee6f630d1ec90ebb1d2e32e989cfec7567a7c432d0d34373b13478f51de5e01ad9a523b7950db45fc519da6fc87086f93b80b8b7ffed003d241c921f60b45ee9b428d6533ee85cc22c2dfc84f9b5a6c0978d706b629c751b97f27fc813f349a212ee6077dcc63423b5d57f7ebc60038b347b981d0c0eec55654218f03a57f02ba68b523168aa2516f40f900c42b0bb73e537f6cb468897aab82105b87f4ca27d8c2e887267b2771818c924595d8facc2fdc81144504e7d1a3bb2ab551928d619fa31672fc340a780282e75e392520d0b5d0f6c3d1855d47b3fea0943f18ba597948a153a1fe355aeb0475c2b2c0caa276a60d1272abf1919b8eec49ff7831f18039f07c7511b9bde1aafdcabc9acc95055ce3f485a850ffcb5d00f90fb04ff419b68261589f9486e75361340f5fd40b02c838b09022d78429df6c42f58fda36f72432ddd6e1230080222e4dd48cd5feb822f564e7c8c0e0d8042d34236a40ee66ec56d025d06179ed08b0b9a53a17eacc8d69f108468068323af73b2dc79a92fd55da885a7c2b4d3f21f524db39a2ca1aeff348d1f17a41742385b23596819c802876645854ea08f13fca81fec233b7fdc5d4b0f9154aad0e3bc0cf69b2180df35b4c88b3b1bf0d8ea1703980174f5530e13bcdd6060ca398b80adee83ca9a0ac615533965ef92b2c99933f1d4e822fab32f722d2240860bd35b1cffa67502de7ecd6e13ffed9babcc6c3493e17186c3309c84e1b8cf3d7d81af0b3c751ff779f1e02ec6980b765b1ffca82f7b865ec7c3a70134d1435029c4a7e81767947b938546762617b9fc8f6fa3f17b4b51eb64217c124d6c730adeb9813e940518334d5be63d75188a472b3bb22b5d12231a758e32f73ed6a079c9d24981288033bb5752eb8d341e38c8cc9dfe1f3c226373e4b08d14a772a8ffd8f0843fb2021184e0ca855273757cd9d0443ebc54d80028b15672f3c9e1fb6a378292c5bb1235c54e6d3cd528d5c67760a6633ac8579eacb45bcdd2d5992870551b03083eeff19f9cdf614425878577be491c8df09f3f3d7f42bde199c1fc994ad3ce52b23e7766af283ba1dadf99589fd4f7b57a8f533700a68f12bf617d08d65d77c24ebcb78a80f598cc4722605066acf6939e8f822a54895e1e471b6c87b64629409aba211df351d9e1211827f1017021d2152312bbe5f4566ea0e530d2b47e3443e8ff6a69ee03294364690751248d962c0840cf69881c9b2e98fb2887fcfb4b4226611165b5ddb44881371e0eef00e58a7626c0e3e7dbddf48f62967a66c3622e7839c6d3e528dd6b83baa3b001fbd462878061fd061021d4cfff83895a131881f3f9e7e79776c37550cff8993bc28d2c185c91e28e240d4d5991dc499b101915174200ce811204537a984efb62756d50bbd91c934d9b14ee643c2c9db9f7a43fe658b9084007a80266e8628cac477bee3ed57af6420d734b332f8a6a3c5ce1ee509b0f326db56f1a4dda5156f3ee6110c3c2b10b9994b2762ca8ed99dbe759e51668298df16adc978337ae1e514f791a1482dd1bd123c40604619e32c55d8d20798a25f139b263696e8e81dc9f21b2c8245a852e9a8b870afc34a785d2d499d3a12737485a694b61c7a47283a6f9a47abeadc0d8295ddfe819c48847d396e9fcf10aa154682e51030b485e26ea0ab4dd7ac9242af4613a2bd7f6c5826a833a6867a0b3af58ed4e96ec5a47afc50c3f759317135d75c96e014bf048c26b8443b6ad4b18e93579b2416b925aa8e69a2f56387bb5e57e0f27acc6229727ebe6b42053cd5dfd5ed3ee8864e016fde2f8b4f918e7816a5eb2fb71e36252990f3f0c3701749bc9647fe29c21f9a7ea119f96a945ee48c48e3a8745b4754aa771f6f69fffb5614deafbd9d7feb220abe3f874498456a6a9cfaf4c755980d1f9f220a12b598ce98d430dc59ee69dba3d07bd1207b0ccc22698c9c522cbeb97573a17e40084dce506233357cf2c5c840e31c967376224528cd1b987927d342c28c9cf393e35dfcbb32a023f000040b689b8f469fe1f04c68748bf9cf2e93c1b13a41f4e9ddde533132e1ea59f94df48e91000af42a836093ccbf41be79214e144ab114f66d7a57cc83b2911f03969ff38c2500ebadc6d2fffc4126e42a3020dc06c49c8a88ffe4e9a0d5b7aaa676ad29922c0483ba1b5cb640d7fa7bd88e76a7110fa79a7f2ffce18e121284b40dc
fb8d5b9d6c2593925629ee71cd3c2dfd4342fa2ab4aeec46bccad6928e01dd0371ac1cde47b2791c49afe9ffea3d142a4fd9a388375aa23d5ee6b177b2b35dbb0d7e406e0edf01d4832b9ab758f8ce7008f799e8ab62a3bfff2fdb08e312fc1d418a459c27a7c336e77f9ad5a983781af660f58aae7e9f8bb144f7ae510aca2c537d7842ea4ec013fb9cd6ee0943629e737ce74514e38945f6ff3aab280a816a19bb3557e772f76213cd9eb628a1d33c641bbea9d35169e07f64a51656e38b0498883d4f8b0e13977ea5261a112bb5c1b0d2cfab4c00c57ec2982d1616798b0228f8cb20264371b42943fc488752a12cec3d1c5c28b0dc556424515c169418cf972f7d50700cbf065d44e14f23733c899e66129f531970ac7b8ba3a0b338cf87a62add2a81ec29a12b3bdafd3f5424752a5d92d1ff9b80d619a782d8874e1e2a2831d15781504223a8bc98ace99615b0541cbd4da50f549d9de7ab7c5479f2755b19963092f151eb848e6a624d988846c4b2813c3b275d2584d27a74ada73698c6d95869d433d4453580f34323aa8b9c26459d1a6e44daaaa03ea99b8defe11ddc7b39f94dc4b04c6b8ec8ffcef4f518c253d319576c4edc693700f68555fe584a96e426e03675bc6c511d2da9d417b72d6158778dc1eede4aa8a8661b126b6b4b69c739aa6c058b050b57ca08ffb6a716cd1719bd0328a4c75761a1e71c32e916fd3dcc9ed0e6e59130186c43a8a8f9134240cdb647b23bcf986df33f5cdc3bc261b6215a3087a0ab409e11e3563bdc481273a5d210eb783c389755a8a6bef60f4c1ddb0da0504b3a27262a950f36899b382c4bff7450103b8adec1f72a72103db0ba2a7239a1aac68b5978b9817736b856444bdf7e380eb65b21f8642b9bf9f3e191c8efb444a6d46dd164b2c1d28571696190c37671332be7f8a5f78066981a67b4acfa70185ed53af602945c60ea147664e55cf084af7c4696ac11ca1137a208effbc11ba2365b030daa7afe70f4b83f2193a480c2260064ff6e3874006a35a6a443c54ed5e97a2197b1a97eb0dfe732aedc1c33b2fb8b3fb780d4896acf634b24ff8cb5663b72660d8a43bb589705d4a6111e08b1664937466e50cfac41825191ccdcc9de30e1f5b37f1ef41d8d0e910c03211faf4939d956b48b6e5864d514339ae844c414bb38801ea74ad2ab342ca8458be9f6b4708b833cbeaddfcdb819889c58be7aad541cd9013433b4117650500e43e3e51d366a2c0cb188091e609580c538082064499058604d88a0c55786b290636f8b5d4d574559b3d1ecfab743c1e3e105991af0c493e13c0c30bbb5d8b459bb4e0db64580b74a7c87d1bb04aba7f6d6addc093ee6a9e4459a11d5e924442b231e402608d678ba52b09636ce11a8f5c779c5f7a7f4a82219c31027b0d3c01387a0f8752824bd25a08b9c8d5a212bfba0cc7f1e89543e8575caa22c2336657c6bda92cf72735d93e04213a22de9fa874794c41eb86664f0426786daa3dac81e8ecaa829d8895bce918a6068f6d6cab67026abb45490a45fb441166e31ed2df19e11e98c6c1cda683ad934e8a871f8471672ce88e588f8c7e567a7f30ff9e73fe30afa4a45c210195e116f76732d71ab3ecb84915dd57b0f63ec655231d43a53d624805c9de0e501060b7fb0127461bf31b4bbc6847481db69e95a8733e38ec0583116d4a4ce858b0cae1c6e6c700227935df9d4429ff871ccf3bd891148f786750fbc92645fdf0e0fe09305b77aec34e067c51594da3b2a7b2436089f30a16c6c38acf1de9f9a9fa37f5c2a8e647fbb84a0fecdb01e2651810075b103bf0687ec257cfc652001505086014534133126970d11eed3801dd7925037cce14e7d9e0709bbc582b7d49c6443d2bc80481e8e6c63b3234b703b054d426a489aa5b2e6ea222237f40b1179e91e1079ba55ecbaa7054c5cf27aa61e24937139bc4f83174fc7d817bf2bf7499692a82a45a1c6c308c23adc3ff8412e0e7b9e7e077f3093caed14bd6bb389067ebc503b66646f23989a383c941eff8c4bd8b6b3dcbabaeca828682b5208b0408949b713a1bc80e67fd3df6bf17dd0babfc0c093bfbc7e460a797a303e6faca9fad5121e1e920e2f8f96730622bdbf2c96b0a6ea8b690b50a36cc4136cda33eefb671fed4c6014827206539cb67281d6abd43a120243be3a099157444e6869a2d852bc795cabb6dd35c82fbae30e0446ea615ceb69793e5445d73419c44b3a6f88288605dd89434418cd0162b92adaee60746a828f415ba52f64d9db470895dc91e9cfc569c6a152b36a40ac8b339186448b4a34af1fd1579539d4bfcf500d866038359e23b7d6cbab14e5907e241c3ab01d6c4b050dbf558db0ec13669bc7d3b96ee49554d236a1ba76a2694e0f133e48942b812894560d348127a4586f863a6680c15f25d402d05e76b04db34bcf96a7313cc70c88fb64f17a1d242e01972ecb2f3633c13068bc118f0ad705ba7f10965f2743f35
9e5e55310f210e707bec6381660b85fed283d045f324987eeb59dd855ef52d1da216cb488daf76b3cfd8d85c842a682056a5298036699e447d9359696dccec254312becf25b931ac21c14277e66136656ad1fbb0bedc8f30076b694be3125367ff6b07dc5bfe00253ca7e6dbc9ff9e70efcdf462e328afa6d5916dd069ad3846f4e8d782f66a5cc8c84e10d46d07f47e0466964a628a83fec279fd7970417eca056ded566d9684853c1807d83d9ec93239c2cb841d43783a1a1141523b77f3b07b821c3dcad97755f88fc5c590ee5d7144efd1281dd1a1eff8daacdd7b97daea9eb5198378e6f79ab9466284f5b8168d34c04262b565d1d7030a6aa6aae3eb3f2c432aa264c758b8fc1fdcce69b"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000110840)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f0000110a40)={0x4, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, r18}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, r12}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r17}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r21}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r10}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r9}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, r20}], 0x0, "546d26e96fb4f1"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000111a40)={0x0, 0x0, "99203f9d93b192bc53e1cb48e9c7cdc52033758f5eb09ee0933dff9e3b264cffadf67452173b038f488d365108a7415e00c2e81823c1f7eec0d4382e158532d07992a73f02c3d2ddc3b90bc980ce651ae38d96414360ec2a3cf3f3e78f7afd76b40ba3660dbc29c98872fc952e515ebdec566375b5e71fe6fb53990d8c62b488b28b6206833d97741fa82e07f469c04c6f41fea359f32b4b2cb8d13b508eaef484901f4bd936564f76c81717bddac14f8bd9e3b0db0a295e5c291b67f4c13b1f628ea70c79986df8f4295cc276287f457f72ba39f1b19d83c861c3fa5ca65afbb176bb6371a1b01c248dd166e00e39d217b092fbbf19c09a04fa5af9a34c51f5", 
"97ada3195cc544591dbb8b52fdc0d8beb033f168eaae16b0b665eaf0f06ee82effec83903699011d0d102869a28630a356997d7c424fdb49abdb4961b559be9bd6bf511937dda17cb76f7c8703a3839ceaa9322635a09b9eb9d66cf54a8c4981b1f9f51a9a71fb10e85545befc4c37a44e9d9f514750639393504f40122766a0709ae40a60a11118fde0e195fd6017511a56f4cac67b4efd497ff7c47015b8230f0c145a8fd8300fd8567991713fe77c8052fe33e05d1a4d05fc5a23dc77913f03bf8fbe971bcb75ec5b7a3d5500383eac9f58cc97d62c3ff9a70df6707f02b341346e575167829852f77ab8b48d8d6e44fe104af2e20cd61733aba0553d242d5e2b4b71291c7fed8b5913ab059f0f538d7f388627fdec229a4b8379954d47ee1a64fce35fa1a419e98b11432aa247dbf9a52606a13ad12c6ffe81210bc816842a9a1a83b6cac41a12e02ef1dcd045bc6dd86e177e55a317c12803352024e3202f9c3bee79176ce48880a431167e01d8e06297d6aed870994c0ea1446cf2c08abb8aa02f99d586b59b3b92ad40d5107fc43292d2cb35d9402e8313b20f90075753de28d18d8087177e18e511a7fdabfa666ff93e9682cc189e5353f0457de34e8a3d7bc7bb5359b07b575b171032da07f4eadf75baebb5d9024e1002efe0633ec1ea36cceb3f05333463685965b27ea50c3560ba840939bee9e0a56f3d45acd9477a5460c0bbf92493f1f4fb2b2e20d5086854aacd5337e7e14d95330e94461bcc26ed307b5059ae99c2cac9f9070780e0cb027fabc0659e7590e50cf5a15eaa4c9b56422a2e0b370c7e375439bc5b1143445bb488b125bdbd5f3124740c00dd6910f1cadcc2aec1d34c7b00f5f9e294542104269c8fc8224a727094a0ffad01c564f2cd6fb0f67daa747744ca9c9617233906e37f751f3190e49d4706ff70e63e154bc9af6db191afaa0a1656922df20086045e72291e41eeeb0e2f9ef17f5df35c8a93b09293b46ee312503c7d2232eb2a8ec3171fe6ed01ca976e3f76d34ba1d9f54016cb03b2bae4e40e6207317e95b6b4fd48393d4fb22c27d86f1ed58db2218fef92b5181214637217af24c22d2b45ad35bddb08ff21ee15d172bd6e23959827ebc699c824435c8aea3446d2f3bb25f28407a7905356a6b83b48fcbb49b54e2353e62e58b796f0489a99830fa3a85ce548253705226bb81e8d915ee9b93c8fb585d0079eb117d10a37c3835f8e5ca19719c8c9e5480c57c5104e3de7fcf9500463c840a8b2ad5bb69b37e7daa6a6eb6b940740dc657c2b96830ce021c1b23dbcbdae438f3112bed679184cbbddbfad2cc91f538572b9b2e02a3b3ab35c8e3b747d75de0ea7c2931c24aea0ca60bf8b32386c202cdfc30f287e43fd07ddbc8c5a28989ed482299378d84350f5977d33abc075f00b3235cd96f39e6a5e855cbd87427ed051bc68973dcfd51a193cfdab6026725fceb6656853b79492734d6ba70c6988168b83567d0625034dc4630e740de1d67b72cb72535f7acb978594ddf2f1dec40cdac5b6be001eedc7a2c77e1609a85c1fed41973c5f86df465c571efcbca6031f5c99145fb34abbb233f222913a83d908ef80feac0518cb0a9e2cb54ad2126a22777f0bd620bf1856bec8c8fbd0b0242ddf1e429367ab3c96922741fc2e96c31378e9e4b47fd04068f265050ef2456ed3014d9044e34a86192925eeb178098ccd1458bf35b3d6231ece087e9362110d3e0158193228bd235ca18fc4b258a1e8629ed6543a53f21ad9142b17c26fd99db7d06467c6da7fd7b33734adc8fd5b2e2781badd3fe3ed86cc5c5e0c601bf96c9d1140a776c7dcc155780d98873f10b805133c9a77b99035b6c45e78fa6a24896f038d4eeeeb469cd798663c03b424adda2d2caf5661c281fc151b4a92ef3a18025c8df1de976f3dc35554e90d059b5d753102a409530eaba273889ccb3179810c1e7d87293025072ad5c88a4a7c011ca22c33dd8996395f9efe5951a8cfd9bafa75cd25c41269bbd39f7316ec4eb6de7de4760e8b2e002e4c393b83274453cdc835e3a10241112e2a212ff5bb93c34d0d84181e9a7ca6cde5a786c686ca40d2721620af844501130b1ac75982f62a52b5f0afb3cfce5fa5fd02477b2f4760e2c5c81a01eabe60d33c9ac769d4801d1d2ce8e3233d40f3cb9391f6e1da921cafb39a3b6cbd691e273a28ea9a81de91d87a44d3d3c49a767c8bb53898cbc0ab30e4e5d64bc3f5fc8e3c9481ead22202c35df22d20c6c62e96916e159ae8c42a14b90d62f9454a002cdf8271d8d2d56e0a6abec4252fae6d8a56ff5200493378fa2c2c976bf8c1e8a11eefdf2fa935ba900d8f75fa8d4f3473cc8de33d982b537c8e6d9f863c0361d0e533db4b438693e695bf2c8c26e3ff305d9b1d892095abd6cbbe7cb26e120e478aa3573840cd487229614942554dfdc3621c44c0303e6dcc3c8e83c423b347b16b125dd0989834624af122f7cf445e57d1d3e3c3
29321b5732d433431a7a0bf637244444a9c50a735aaeacce81bbfec8c5a1a0d55c19eff11e7979270688c60cf5b880a859e31feac30b59701f948d3cf542a77dfc93ebcfdad25d06fc81bdd21d1b79eaf16da90eceba39119fb510d6a368ab445aacbfba94c259f3fb459ee2cdc51cde9743f2519573eaa2dd33d0cc883e8fd2f4cb410b1caa653aec0b6fc2036498da3f59ec7e0e68c60f2489f69f667d68d99acebe2a949bde63e6d3eb0b0b33c9e7f2bcd92eaaba1cf5e8fe855ee6007d6e3b7d9e964222b10a239620c385b2842a8fbbf32008ce6386067dbdb0bb1340236aba4f5491a7e873737b5527992ba78e3c315b0ff96f6b0c5b5c12e6afc564059f72532fd71717e60a380031f645ead011df11983ba9e2be711ba8572f73a86c89fd93d3a83d4faf1092e929eaf4965b691f66fd76369814a3ad55acb47b7f067299b69100e4bbdeb521a79957bd9b297426e6a0c089862dd45b27bbd56596a932514f6ed6e9bc24d000894d716ad180c477c9eca377e59734ef113273c4a7f670b3f3a5ecd0e0a80cfc5a0af573d810b086a4bc4fef7e2ad56388fa773bfc649beb89d063e4b4890521173334e7683afb9c921c1cc1bba42a14697d7a21cdb1ab1dd0923b0db46240e82b64e157d94f43a98808540747e0b9902177c2d8b2cd5e08b61f4ca0395c47ef1edca09a43e958244b5de406a276c3a7ee21b47d693fba90ade97c98f6a810bd2df8198b11a88b3359e8ef04a43dc90a695ecd13ce0a65bdb6dd57b85c51d07f6233fa87abe939d170bd34f280a4fc0d564c452fb4c69ad6eca28bf9fbbcc7eb5fc1bf114c189fa7fc008223dc8b221378786d3b80c45c8f7e1974f3cda0a8df525fe019aa406f3cc5aa1c82b83edfcfc84eee11402d99b468e7661c9eec1ffafb06ebf36c00d98f9db7b14541003c99394f4c9cf0d4a49b567e8fadf67aab9335a616f026edd144f9588b9df6533f16f7ba9be02cc2e6ba5726df75b5b7eac9f7374396ff64f59d6e660625af919510b5b3a2702ace1b85b5a4b3ba4234eb285224db5bb2f2264b86c48713af5de53e8e84590873ddb8174fdff8c26a8849aeb3f9547e645bfc93be57a4998edfa6951c13f897fa4d252ac99b0aa00fc7d4eacdee10523e5ec658ddbeabbb3e62ce8e301d473aaa15a4892601651d66f41f5135ef77edbdf1f7f5e6e512f6055619dad10e872316ec8bf2d7d4189909e6c422456fbe570c92a46d28bac257b13fbc513781d28985333d030f6ee76c31c8d1c9f8b343856daa4dffb1de1efe31419a74a6af826bfade9a90209e9b6c23f721e333910de8c665fb8a3c160815dbcbd6d24e0a0bac9a583f21b2cf4148c1c654c96368256d60f039b0a6fa9dc50de9a6fbfc6afbea8c7017e8425a37e6cebb8eef1baa267b81da54c042049432e417977322c960d8bf4eddb01be9a93b9db47efd72c26cc1436e7a4a346ba1b467959db32c2d1b222f0f78cd26f96459fbcec8d1f9fd6bd1e73ef7ae331ea35280c857f5618fc6c31f9d667d772a5ee4178652ec6ad1b88a24033aefe60b080793299c0e73e96ce12c7781a5df627d099e161446d2eb9a98cba220e4dac6d8bcd7d3f02d8013828c62714f5b4a7f721ea982be1f8531087d72bf04adc0446068112dd627163de2f84c088139a23cd819baf82df93665c1cf8399869690a3c178f7460b9a5ca0cd8dad23f57aa3b10ad5b683ab28322f24b5ed099404cbcf51fa86a1951d17517a659af9a5a2970c163bb12ce4a202e75e67eb6600b49ff3a078e70a55c73f684717502576ab3ed2b28aad90aacd1938ba86c59092d708a7879f8fddca76b70ea298832a29d17113e1566c525f94ea6fc41a8625ade7332c1f14ea91de7d859c6e95a002c4ac52e7d8e5876cf0aa1a3dea79b646e8ec42ff3d0e42fdc4e071c3da2b68dd1b89cfdfce7290ddd74f08c868d03c290b40fd84fb0be845ab3059cb8394e5f1ccea80e567e784abbdbd1b9165c1eb29ca2c19f63ab0817710ef3dcc6cb39317492baf2561c65c317f0b178d2aa0ea44fd671eb533de70d3d3ff07bd89d7bb0f8b5d6aa93cca5fbc6950091af8d92052901d346609bc1472d5e2aa04b8aae92a0ca472b33c83c2576b8dd05bd7e80fe8f95681b60b487d4ba18a528453584685f621533b60c46f09ee88584d500d7c4bb0b2c247e78d558463f9d825e554de05b245a64aef4865e3a62edfdc7b0389bddad0cc0ed8ee40134d5bffe559518b9e0def426b13edd07881fa28ac18f070f4c206b0e619bdfd1fe013737a15aac793f604d6eb3bdbc3c0fa37a3dd851d9ae3284ff4709335082e91037d23ea97e0b5f6313962ddf7d9b017a52ec59d6ad2dda0ff082337e8b0dba11f4de155855e51a6425678da5237d3647bc1f55791b6cd062f36b72df5f0f83ff2f5836f57186ce9ddc99cbed2f4851aa6eaa1555579dcf365244365149e50e9ea648ca75a4b6faddb9ca784b23ed3925a14b3822b6aedd8c
de8d31b4db1997f81cab35423f090219c935acfc641ab44d7f01353d7f1cc1c87c414b3580a7ba34b2825dbfe3072c55364cc1d9d1101ad9528fddbd25e73097d14f84476eafb01be7f594216fc2e6072dde34a8a1b24b54f435e4d44b78b53ace3e2e4faf82540c0626fc36816e6a001d86371c74da7ecfa4c2e01db3b5a6bffab5d8bcbb88e37b24546750947c3b59963d5c7008767c37b73413d70bc8d9c2a5e5876c889b82e9c429d5a3d6d9689f2e6e109be54a086bc5d385b0bcb463ee94194871c4ffbb4d53f9b21c06ed256d50f0429bed3c84847ba5d54ab8023ace761c5a8a7179fb574716d363b4746c6734a8c2be9bf5c0e057bc94d9384159cc7d1d758062a4fc3cf2780661542"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f0000112a40)={0x7fc0, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r16}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r11}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r15}], 0x0, "7982d933f0dab3"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000113a40)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(0xffffffffffffffff, 0xd000943d, &(0x7f0000113c40)={0x2dcb400000, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}], 0x8, "400269e29b8298"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f0000114c40)={0x3, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x0, "8a18608fde01f4"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000115c40)={0x0, 0x0, "df4c741514078414a26c3ed11f3ec1d9fba039ade73aec48228c4813156cc6183454f0c572c195d6f7a68743fa18124a2925b35b2815d979d6b1647e4f59a3084dd0618bae94af33658cc630b783d629bea28f053d9e2fd6bb9fea2a2cef94f8380c2d74b03dcdb0a3840c7ee884a7a7b9d8462dea41f86e7a8903fedf5dcaeb40a8b93d87323059f50a74fa919d7f97901572967568ec4ebf6dc8274b1a2624806e168fdbbc3c688f777c20be204db8dafe0158c448bf85ec8523c56d2d536b888f76d74202213e1cc65b0e048e8975e1deb9aeb70831275bdb67df0bc1a03bf1fff7f320fc25e1d6a7aaa828b2e96b27665e693cf309fdf2ea25dce1552c67", 
"2611fdf555ce8c2ffbfd4e8f9f2eb69d1d2876ed707aa47684e757c8f50b36a012fd2a8ba9c54109b675e64824ebe5775d96ae080d8eb4917aed7eba4d51033900a34e26860a42071e8db6e6ba52ac822f47441f78eae48d085bd5216247a9a425f9253b2e9e7ff4e14c22d69ad96ef10131705e597493ee01c77c4fdc0010d6451fceb0511ba2c81952151e784710d69c201e9008226477627c09ddb3981980763d645e9ff983639f77f88743b77811933d49621b46b99a99020390fac52a980424797fc1e302e18a4e6761b0a762e9e6306ffd1ab704e2214d8309e451a97394fe6004bc1c7cfd956d098ba1c4744abbed58ce88c4eec38da9d2aeeb8cecabaaad71a4cf80460e6346d59f9460690f5113ecdbc8bf485d48510790a81aeb9f31e7f2f2ca3ffb8e843713471323fc9680a13a5356bee98de1bbe57a19ae0445073c97a39f135b40ab09e3a57e7fc3146c309611692cb382210a388cc607c6c19736cc58c1e429b6ac461daf1c0280568c5fe56d5dbbe6930a4d123a0aefca3464c3c7465500fcd7e919dac77801b36cff5af84887027f201773f61e6b48a89b8530cf51f594e9952ee652339b56271fbe156d3b1a3fe4ad908f0cf1bf50e4edc1d10af9059f5337821daaaa49d30b5882461de7f94be600c0e81952a25960a0711861c9f2242bfe73becd1b9afac43fe8fdebafc32ae5a348b0932d0c0fdb0426772ac05e00410f82aef68a88ea8d456ff36cf69155a30b7c9099402723d7405253342bf88e1b5761d6db7c7186d39a573919194ea6a7db9945d74f39131af0b77cc71c29fe6991b593a450a2800c6e82fa4595958939382101354a266aedea256a26ebdff65cae806f9ad7ba7ca5e4e90d243cd2665bad9d040e12bb52a29353b6eb8f329fc49c08f0963626ef6728ca13ccd4d40a9176c567d428197b3bb40f4e817e52c90a9eadcd5ab17018cad8e3ca50483cda21f1f10407b57a45fa4c18e3509bd99934c5fab265a7cb2e41e61f322f636e998e55ffff6c86ab59a9507751f66e039b130053d506919ab230fcc090a869eb579c8e7a7c9b80b3b37b3b7d4d5b8ba9478e851b4137d7e485216f416fda4507179ada11853ef150b3829269e8722dd993362e5ae3f24ae88a0dbd71c2504ee3fc713ca4e5e0bfce1af96899b83ed636cba33d48a3d17914ae5ed5dd0f4b4fe117c1f63b87428fdc8e2bb61bf39f1515dc989c7ef7a0c9aa8f7a3ffd1145eeab2853a40087ff42f1a61cd6cbdb00ee7ccef5dfe5b14b2236b8e923348e09424d1c2ca557ed6aafe52a356e6462b76be8304b3d7fa80c53a7dc8fa8e4a55febab054c2ba9e65a20b937d9620eaf22122d6590cd370eb323b8fa9b87acfe51ae56c02d3e130ded6f969562a5a9e67985646a70776fd9c7e5885fc6a9ab6f997d7082bc2b744af471d9434eb52d8fe19efdd5eccb75cdb037d7dad33950ea8976ca8ef51e142733f727c972e3404b69dfe55a4f1df8bc3088c45b0c05d886cf6c78c094f616733695a3482a7ed5d28b43730efbe4fa71007aba925ae265846702001c6feceddbd0dd89efb2844d08d0a950222643a53112cb7a6ad39aaa87ea306ab9e8142d1837c8ed82d0cc92313d3d7dc67fb57adb0470c8c5417310e5f54f9524aa8f25bd01853550a8a0a0fcaa3340b6aee239fc3e0d30f299b768a9f74cf521686d3fb5f233bd95c7b4120a913fe107b4cc9a43be149d5e1bc26f33ecf57888132ed146216f765a8eef3d988e834d1a5d352ea944145ffdbd2db976bb4d8ff6e136df4301e060c313d5f72ceeec477b2169f8cefb6589a41b31807ed71e781fc6decf7c822a7cd227eb1da185620ffe54f0d17acffe2568af921570afd77974b04e1c6442dce26eac8356cff4d3088cc1e7cc1c04d047d345ea0e7e3c779a0f9d766195f2b42e35cdd82fc11011498ea28a2e25b3b562ab5ef3f3274dd7c1eafd38c9e323627efcfc3d831cb5c4c4a1e743f820622052fb3f418ee380203aee02249a5b9bd28bc12dda2c4ebb2c1a3961f232f6361633942686afec4c8b4b02ade96a7b8c1b9b5ba9b4ab08bf0d298dbee764df7f9dc338d4c4c658f93c6735abb4549a85afbbb935e79e6f2269c46d68f9df1d6957275f7ec27e47dadba07ac0b4239959293e956c218f70e1a47edb3360deec97a9bdf06bf87b571799f193a6ca5053efb59c63062ae1a290048409edfeec83a03b251859d4ea6e4fdc233d3c67fdbeea6594880e59798ff61442eaec1f28e34acb3ba81758f3f04d6556c4b73532c08ede1396897379f01c394c87db124311cdca8f016f1ec3b837da9f3fadb7b2611cda7ed58e9f3f5c0733321007f5f4f96ce8be8a01d5c352892e845897fdb72eb506697c7a15141a26f32f35d3fd969bd55565e6a3935f9450bb4889254fce02d4c005e518738e7f969cae7246521b7d4606856ff6533b95382ac612a2ebff9ca86d1f930567f604957702e7ea268f580b4e75fc2d46cc
94beb78d6efc8db6e1a29fa8cf001fbe287b1b924e5cf9b162a6891bd84cd5cb86bdd7c5e7487316d01f543f84a57e030d96f9a1a560410e0e678c14b726755dce98098d9737bf450ef21795b307976c78f29cf680736b1141d24a6352b5c8cf35dd860a2320cb632ac3aa8e4e7e0f84e1dc94e0772eb0032e2618822fc809f2510f95591365aa8fc63b499cb6ed8cbf491b47d9f32f7e31815f42432d68a40fef3480a8e25f245937d64e566a0e58a5b82691ba4540631abcfefddaaf6689935091c103cdab3593894f048b33bed3556fa59a4aea13537481b8376b64d90b9adc575786abaa0f363ee0c50282ac7970f159fccbae559a0b61bcfa685ed5934b7624cf66050c85812a2b970fa3d7599487c0d8f0ee602f84edf7d633ce55b00b0ab4c61e3f38c7753d7d42e5c58cf7dd8b94e66483ceac52966ee28d14486cd6fc49fe2e4c5ab5dc93bd2345499ec5e47f375c77f59117dcae661977147685df47549217b73596046b6b11dfcf744438b1d95b613e84feb7fad4809bfa46f9669c7bce2887427e03f3e3c27b8959aaac8356939e4611522e30016062f58ad8fa0f253f84165b2d3d1be6f4ca5286e1330503776a94018707a30211f66ec30148f21e7c56717b7224a89a12936422e07aaf8ade48dedfef466053df06fa51818c8c4a45daf8a1c5b5905c360df9031868a11089688ea09f09a3269e50045870ccf91fde5562ab0cc8c1682577e273eaae726a54053a02a2215780907db33636ede5430962163d2f3259adfdbb4b41e330a1a7633864925344bb4303b1dc18c88d16b55af8066fcd48fa038d76927c3e706c744ce61973ea0de7e1331f738f6a59a337c4ca748347f87f4e0ee9773435ae5a63e6e173b0e6f8aaf587e92ba440cbf5889e42a9b19bcfd64f130441784906fd961c7489c8508c6b7ff77aa1ffb778448ce32bd6055760204897146a8cf750ecab1bbff7816d6e10664a46801a5616108b0e9a8a5f64b1cb4e9160747d47e22587fff384227fa04ac11d425b6e15318945e4f1ae896fb582aaef95f38611cba8d424390976d34615d3a064759e12089d21b86cc7b2fad7dff17d095e5dfd28f28bd92495c6c6c8c7be42d4de6c063b3f336d21b06f952c715b0bc33c878ba693808ef4766ab2b1ea9019619457dbbf87f92e0ee374c992248146865e5688b9e61280084ea17b2f04f56cec4c26c7b5953cdb51f29ff415387ef1d6f6cd24f21a7c27b32ba30834f05581dc3e28404063208126193a262376ecda4c762382cb5fdced22105504313db9dc284dad4f4650fcc569ce3ea4e44a8ec5411867e437afa104d9bcede206c63abfac8af06b3ea47fc3d2f48e85d8da21451abac55207ef6bec4c1f89165f864cea8ba76ee4e45ef3b08636022572840c3ca69c061a98871ce42a0200b8a0b7e30ce520cc8020f0e7593a9436b3ef2d099ef2eb56d1180b148c4c6d4eba9ed9f1e7eccc5fa6f0476fe14b9f07ea6f63da51a7f7bee6b6c61d7c4c019fb6f57f50610515be1318f0d85a86963c1f420d219d1264adc07dcf1161bcaf837d6cb9c3d14bdece2382d8c568e3b45a693c97296c4f5f37d43a1fe941155395c04b95c9fca50d2dbb0a3523de3ef05ad561e2ed11cd1d2258e1e7c659e6b96c7fe166799eb7653fb09881bad5f491de9dfe34ca306ea48b1dbbbb9de3dedcb84c8cc57dae38d183c0876e746b785c03e66154d52337e0bf9ec6a28b603683da8730cd620b71bffa9048908eae95feba58edae99512b04f175e27a7a67394f868f91f17b86dea5537e5f256b47ba305a61d9e84ff77672181c6dcc0ca650bb01a5c45578501e0c416de1a41aaf16abd20086ed6a515c6a1442cb784662656d1fa83430801cad150cf06ffcaa0f2ad67055389f6fe5558c2689bbec141164fc69833516ad8512b177b65d9f887d286fa95a5d5e36cd2fa8dcf7160db2e9fda97a2330fadafd4bbb6672146d8d0af184e9c131a7a18c382c99d34e8bc31754a9a30cbff1fb4fa624f9e2dfdb15d53c5a83fea362ad00fc0dedf897ab01c323950c0da1f4a1b77c956b40b3f796efd57ed1cd850bb754a506f675da039627971c6438d6b7d6b1626020ff02e724b17971a9fdfd1da8ae829441c6f0247db0beeb02ec96a78ac29ed85337c5df3fe43578f46ad920b7bd9f46898a2e002a4403a712602813a8fb8166dd57c1ec8fde9adf41da80e69a920be35f495c4d9066cf9fc23286d2d7f6f8ca4ef9a0bf28cd996893912c416d27b8b330a5788be0d4122b8f110c1bac5406210d4679a3454fa69ab059b13eb4a070698290c1722bd4eb772a48334e46229a8f4fcc3e85eefe890593e9860b4c82f509a11d3832eada0b913adf418209a7a1f63fb1c427e549ad90c9e619e18f0eb454e577b2f03cd185bfd8efb76734f8decb306fd15c13e5777eff49a5cb61f016b369a5609c38ac436b3f1d634e075c61d3c100abfee1d418f2f2c51bc4c8a3cf84b802e6c94
00a6be6f08b6477bb5f14c145da47aaf972429d88012161d7f9662af60ef363c21ec1eec4813057cff08e230611f8a26ca3a1078e74b4fd37e0dc4005a4f94c4dc662756671a7e2f20c520674a7d9ebfb266589f8b0e0202403fae93f31731b27621d1ff72583a8c84ccc0c328fcae3d39a056c8c68f95b1322c61e3e7a13854a960aa84074ae298ed2acb2d66434db214180bd77b0359ea0a28daed11b4485e7536ca8adc81a0727fa9701fa1019e24115b07ce6737eb0685e33c880fb9ed9394a461714fefcc766a2bf2c0a42163a6d64412f1a364dee3463a1bbc2467d8446108ef2a2304380c9acab7d267d13eda72a302c2d71c404db3f88faa6108c6e16367dbbfeb2b716146ec8c96697"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000116c40)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000116e40)={0x0, 0x0, "8ec903bf053f652b8aae9940fd40078b1dfec6370c72648f509174a094cc64cc579bba1168768e586be2c2db4e210510b3005dda128ad1453a65679256966090f427d64217ccb47ca2579489a475ff67a264e4764b82c05a49bc99b50cfc33b2c542f0e143f0fe830d94986ddca872db6d5380bcff3ada0a872edfaec7a6729cfab753403f8df28f2b046e80400ff99fe328dfe4f670c53cb067d70b42bcccb5410f58db6b774d849311cff2f18801cb6b82cf1622dc283b9002beb64d9a5fb0fec7b3bb6b0b6ef3cdac7d36c8ac6a3771b8c4c746a118934725c8c2ad4019881acd243d3ede9cfb220a56849d718c38992ee59b069850f657b22e08f778cb46", "03dcaf1f9893440d48b78544e0f663e1f5b1cb5b29f32e7e1ef8ff26d1cb286aac2898d7db4bafa536e56e130fc6c9de579a6997ef306449edd25c1569d2d1c14b9f93ba28142af38a98678f02fc97c8ddd08b80b1b48333d58e486f4cb468c75adcbd2aa34c550e9f29b4d61eaeeba87ad9f4d01fa7108f82ad0d1b8e02cf71a0b325bd82d2d3faece05771556f94bdbfcca8874ea24dcb8dbebb227fe97a3288256b1feecb82148ae1a6f0a7d7838f16531cadec25369e7ec40ccd87b3bfee78c4ebf62f628a9ac0b05db442f1137830a2df8a1f5b2ce045b7c34a4370bcfedbdb459ddff9e9e5f4ccf05753f67e0e5357742c8b3086ab1247e4ee107eaabdd73d36dcd0bf045eac2f758bb9ee6966c1314d742473d3cd07eec711b3f62de19df36383723e62f6478590dd2270488e5891bc86e853031ca7f0c1f45d7de3d12d02c560590785f31b587c629fbb6a6e5407e81fa2d4892078237a7c1ecc0057ce1d36b70b604dc3311596d38c4b7aa0d3cc404c7b35c1f6a26c8e802db968fd10f0684c3b026aecdda4b31dcf8e5faa832a856af7e83965518341e8657d46d7d48b81242aa637d66f03be22c4e4cdf07e0be6c568aa653821861459b5eac765d206529fba81bda37a2636e73ada347beeb29ef57d5d4238626623faaef94453b8bd956eb524c9ac2054b1622e6c50a0f54ca1814d47d1f427b198a598624e5a7acba4f9c09cff789c9a48af5a924ec0dfb466726cd37ebaab478e258a80a125d2cda04042d0f00a95b459ff836bbbf4283e0cf6ca970625a4889e19f7ef9585cc372eb8a9f3a988ba197cfa8e8b637fd8c652bbb70c368eeb9324c3a1d4506e4a042781b3e7e1d1d8fd675b7282aef6089a7860ab3ade6cbc303c679fb5b527c86ca41c972bdaf7db11809d47f798c0987a88577da04b57be3ab91cf0a050aa4917e5ad7f21dcc4feb81243e5fd4624a8600c33d8adda0c65b8720bd62dab28cd3c358912beb90562d55c3207a193423fc7d77147cb1040508742c4e90a4cd42f136f21c751582e19b9d7796a43b604206045e31003ef576bdc77304ba89fd9c6616b8f4dd05b5bd6cea1407d1cf265ad9679911ef63b7e3ca967d0631981c784caaa96d4f66d3e6b09fbd0179df9d1c7b157bf9b6eb036c159fe9d703f6a97d06ddcebf0b37cfbf295dff20f76fee8dd4163b9e92f9fbccf9e088d7938493bbd0fa2237bbb6ae2bd601fddb10b3b129f97784bf487b4e9c0a00e1a529f634bea6c1dea9e0d3026f24d0e5ed7e8430eb984f38d6f9b4782e79d06dd1b2ac9c5c35ddae25e029366b03b9d58d75b0fa4b232378882094254e879353110e5541541de17189326700b22fb8cff5651cd7ff40d89b0bed3b45c72f0568aec709fa333a7131273b1b7eee403a5115349b4cfffb47e670a01b7aab15aa23df7b01b8b8fa1846f8a48da5f5a2aff3c1419991ee4ac27d43ab2097ce1ea27572b4581c56cff363312dc9ab6a47391a2ab97956f7923c028f4162fd0a58f03aaca5bf1b18f5c1f0ce3651bd18b73bb2c09c1e060e372ef2a32c1a8a4ac8a3b78545ed079b4813a2356e9a83c56174e6220c7f929ef23810ef7f97615294b86a95e53310edf3fd8b
a1ce591499d199f92860d948b8af662e882a432d5ae9066054e6c6e0f45e83d9c801923029b649894697ea9a18b161991ee95907ef9d4fe3f2d28c95aa8d9b3ea6f63db1b5150d4a7b3bedb5dafba3ec25272932d3c5e622bb40c7996eb992a21dc99ca906ef28e9d828430371dbcfe90b3be0976289e2096b2a3f4fde7e462fe89e6c6b663c49e873b3dd5c3eb9cd45d1c64543f17cd35d669c041665a1a363992f4b86b523e1d2ef9be87c08c43717dfe231ceb3ee6377aacf76cc483e945877a4d4548897d086dee815f1111d4403010d3bc87ae02a307d5a89aa6c14f013cfa8e1c7e724ecbb89447c37e0f2d13591a9267e5e5bee8b17ca28f7ef49aba56046538e21e251b9d475724f8bf36ff7329cc1c53a29cb610d1f1a465f966e1539cffa0256a8d83757b4c554b51a8a226cc1fd5f3a1a3f211cdb2b966b12d3a944d59774ddf19b1b04a693a69f22025a872234d9bbb606b26471709c6cacda5dccf8398857b4abd3a14f81a331bde2699bc231bb22e74d0a9c84b4af494f89e5d934624b9b1c6f6f7f1c147a62f1e6ce408cc3aae2f98f6ae6c905316a202b568848d70773ba4cc8c28373f3674e4cbf50d47512a5ba8c90a9e9862bb689f56e42fb61101efe3f78bc460c76c7802504f8931f7f7a22cbbff1e090cefb39644ca78ea3317b1ad3eb8216ec67e53e8243cef030afe6999a5f7fd160c2f7e46612536b5480b5876609349111108a2b4c2ae6c3335dd7122d0e5f93808b454df43e3d9459ea7c75ec9bfd1b15188d7e890141444a34cb0a505d8d34cc50acd8bccf80f755b2c0aef0613d9c6dba88a323c318ec36ea19866c210037b7719a8cadb418d046cebb55bf2f134dca393bc2cb5278102584715e1500ba68208601b587ba6b3e026b717c609392772edd60aae8a8df223c51e8eb005241fba1cef80e309dc47e3e78cff043fa96c48ee02d1e0c507da17bbacd69dd08198a1ce2ad09e05a78ed8d7bf1cea454b8fcfcf807766300aa2382c3a9f0f79aa2eb85eae51da00d9364fe85abb40b3f63df3cdf88c88f4612fcd2a4be4e683a87863d520988922b630a76f5c3d87c89eb685d69873efc4604852a086f290f10c8ab6d595f2e73b3dfcb27cbf5b7990be66594a1989950f53b696cc4ca96a669e676b5813d8d9258aa231c3159a7b5d9a1f97cbd537e0e5e78b765a41eaf3a182b56844cf4244894196c7a68dc6b4040a6728743650a83e050daa580f876b6cc4e2d9429cfa9e8052262127c3d59857cc58fccd5909bd7e48c689d3c75b0ad8d6ece1d89e044442d7e66a71a53740614f8d59230f10a58b7bff957fa3ed6a2b87c1760eb8d076bd948b739c36f693d7e479b3d5e909f638c2cbaa8b0534ec930e97d9c4647816c4c350c7f2de0096066b6c755e8eccbebf09a670fe17f2b1ef2087900a6ffb41f078ee9f4465efb073abe0b89a4904637e2ffd81d2cb050691663718403f38cad4f3165a54906aea1bb56d51224e974337c46ff3a91eeb1679aec87249feb0cd60883ee9ad7ef9e823880d2de9abfcfc15148ead2f2caf40735bb02bc7491c99051b9cd382b3b84a86ea93c671e78405f3f575cba517af9799fe436fddc915938b184aac511f6e502b3d75346119858ab29496c96ed769db712b181d069ae5d6b68ebd21b5deb6611db19cc88a63d14076d19b550b0a0bfa5065cb0473ccd9ece619806c8034ffe05c5f63640acbe5869e3e97e4bab218469f7e668213f041849deba3f161a2cc3d26bb8085afce1202a3b6e716e6808f05ba26258865df71d69a07ec8dfdfd1c325b5b697593dc50c049cb8faa4b1b03bf320b4daee36ae70a0a0d08d541efb54b5220360698b8a568d4772c4394f49fe494496dcc007429981d6d14574d655f9e0db6a2956bd686a42e8f55693591657d919cebe24e82351eb246bcfd0a6cb378df29e087c33a39e0f94e6e3d597544aa5710a1695c63cebf43efa8f18a00903f55ff578d89665e25e742ae17f8fb8ad4ef93a8ce4739f3aa1554c559df0853c67590c2e3f33e7bcabd410518639fd157e3d21ae198cda85a810c9841e2d2be0f04b78d3f3bc37518f1f4c3d87b08a06ba43f38a500046965d8c560bfa78f4b6f12f77b33db1caec0cc637577b8bcada413139c5923b41355da8a2059bb1ec0f055477f49977584fd529a9c6b30ea6847b91cd2782e63530d54d53e2ce042aedba3abbe2297ec6020ccd5b793da75dc4959e9fafa6a11ce6a2c253f1e22948aa64f00bfcc5bcd32e8a1c3de126f40164ebf9446ad056618771d1d2f9ace2442d8287154852b53e6b44eb0c88fc80f7345de164221c6fb050dd410a8c2cdc35f605fc462c4b8f4a70684b2e35110751609ee1e72a18c3a34beae262c5b95f2e4ad5cfdc09279433609bea402f5868f49c62226bb892d0fd881e55a1588cc1be9584277998cc81cd29c50f6d83e589269570d47266a09515700303ca115f7e958cba81a739abf858f1bf96f8f
ce97106abc9a2b1482cec0f3644f8a852a566f1eca56b0db27bcf5f48a2675df2c63c6191055214cf31671616b16d2061ca030cac4a71d81dd4313e9f14cecb9f9def8894d5a092dd212d970fd0c6fc71935a6b20204d96eaac97d137220143b259fb19a45cc758924abcda3cdef8e50dc108b7272aa280ada8955266142acc87bb6d2e375e3751f37f0135df0c15685097e7e5ac566151cdbd96c64d98401543db76ca7f3aacd916238e18dc474990931b66f1424c091b3dd7712df634e038f61edfa70754a47df0f15af62e94f7ba10c613a0728f002ad7fc466f0d2cf53976efd10c54068e3e0f5f097dc168cf9e7633ee9d2bf5765d5b0d167763e43de53991181be485e51e8bbd4b330a470bfc86ed7ead7e04c5d0f08c1498bb0a654a2e8c928c7019c719173f465507696bfb7c2fe21d61ec8e36087b68b8a5360daaa8225409b6f03e0c281e2ac78c2b6acac618829689c44d53d09efc09a3d6181e77fd6e24bc516a132925eddd2eefc416df6445fa2f08d4cbf209a30caf6678cfa81c778d32c8f7da0383ddd627b3681d8a8bd65ad8721a298b72450342ac679e9607e5ac7b3af600f87eca8d691fec32076d24113f6f4699aec2f6aca2f8753ab7871cdbed3b8073eb8438f8afc807d9e5e8530d298e69fcd0b62fb63b099180a6a1bd57fe63e3223747bc973c4e2781665325c702505cba798ea979e61c86f4af8d855f3c1ddc90661ea05262464703ae88cf52f1a451fe462fe0abcef4972b44ffe20b1f90608e63c1899cb700bbbdc6e04b3800a7936fd5a90c701a16c6ffd10546878e366ee4e6ad80d8cba8f01efa16c7e0b4389777b820771c87231467b4371f3e603c859580ccae225e9c7e6ab3deff4ada175f1c8ee885367f964db6ed0bd5d70f811d93fd592fc9be1c86da99ab1923c7a49105ae34c27cfae67f07ab7a147b307914f75aa90954e409a8d931f07315dcb8c1e0bd904001af7a63ae9bf286c87df6c7d8e46b9d338b9bdd72fbb04e44069fbc3e1c80d6af1cca849f9543069a5b60e06d7be1fc0ec25071f2189e9759be5fb5b0efde0e0a3f8c637b5cdd2da501bf3972de07a775b12fad2a00168ed79b45291a77e74c9687528f4d128e2fc6a7a62252ef91161f30c6aeb7e16890549a2fcb487aba2086c9ac4a22ad97fcaf4e8ed5a2cedf16cdd8d9a54d23cfa2727d0493b9f1fd14e4cd099e29b01232ac66a9571324f3538163832fab4522e49d43260d762195655"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000117e40)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000118040)={0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f0000118240)={0x80, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x0, "ed6b78f4bf3667"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000119240)={0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f0000119440)={0xff, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r1, r2}, {}, {0x0, r3}, {r4, r5}, {r6, r7}, {r13}, {r14, r19}, {0x0, r22}, {r23, r24}, {0x0, r25}, {r26, r27}, {}, {r28, r29}, {r30, r31}, {r32, r33}, {r34, r35}, {r36, r37}, {r38}], 0x8, "bce5f58932c3a5"}) 00:20:23 executing program 3: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x240, 0x0) 
ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:23 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc020660b, &(0x7f0000001440)) [ 1223.328521][ T1942] team0 (unregistering): Port device team_slave_1 removed [ 1223.360159][ T1942] team0 (unregistering): Port device team_slave_0 removed 00:20:23 executing program 1: syz_open_dev$dri(&(0x7f0000000040), 0x1, 0x100) syz_open_dev$dri(&(0x7f0000000040), 0x1, 0x100) (async) 00:20:23 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async, rerun: 32) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) (rerun: 32) [ 1223.402520][ T1942] bond0 (unregistering): (slave bond_slave_1): Releasing backup interface 00:20:23 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x200, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:23 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000108c40)={0x0, ""/256, 0x0}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000108e40)={0x0, ""/256, 0x0, 0x0}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000109040)={0x0, 0x0, "f5fd58cb9d2746e76481993acc06042a1ff4ddaf1f1493f41e6a2a91246abaeb0da4ba1ac598ae5967e0cea69257486302d15716526ec75a5168e1ef8a56a4f2eeabc3c6618ab9fbc00f8632153d22305c224affb2a78497c17ee389dff0e2687a635571997803e6424e6b9af8380036308cde7b2acd94110ee4d64e322ba336aa77d3108232f21dbd2e55adb821e2aaad6ebec1afd0ca2e03ee5b3bff99b732f98a49ac88d6f27cec73070cbad3dba7e9dc50270c17063ea24bb3fe9fd644cb425d5034b2c47fa14a22934690331d96357730f400bab48d7cdf137508829ad62dab15c569d327992e16c661d8e42ff72e56656d4ddddd54d66c62e3d4b20428", 
"d6287028aad77bf103766764d658cdc8e7d6b2022eb1c23eba4b4783c8a963c2f00311d996d50dbf403153f05ae2626d2f6b6b029ece5a999c8bc985e0603bd2ece86de1b4b37841dc53990ee9f008d6fb9a92c5ddcd2190da43987727a5fe0f33ba9615545eda8241bf22b5592ebd0db88d46e6f79c588723088524b3a66740f2ec51c11309b1b4295ef3bef371e3febd15e4b344a07f69faabccf1c08b22e55a460ff3f9c651e76ad5d9b4bcc6a70f5b592a9efc6fdce68e364992cec743017d3be70b3cb79e4ab00a0ef3e202080746eb10a460f9cdf7a02cec6cbc63e0daaedd9cc84495f346e1ae90f6b19f904f20609578dbdcf884eca33ce9a4a1161366c4f0f0b8bf768d070a8c9eb90e18a0023fb64d89a90799ba1db9f5ecdf113794ccc94bcf439b2932fbc6b42098b687ccd54a6d638a3629da986ec004d9c37c5a6c5676637dbc368806210d1348a98c843e689d0be7acefa3e67cc604e649d47423e07a2a5f665b5de147c2961b82ad2f474d8b34be886ca7ab790dc2fee9d7d3513b9f4ed86bff1a7a51e82685fd7e9360b3ab137ceaa13e8002e5ac9e34aa6047cfe0745327b27f37cdfc4de2d5229801cba6b478a9dac75bb8412f201c18a8d35d11e14bdb77dfc7367fc35ba5be2b4bcff919218eed7cdcae459d5ad6198978dd2d628f4f7bafc10e961a2a22a2897fa8cbcb32f12f457b66c3ef3be93dd8f75211644fe4053fc07fe4d280f13e1f7eca2e930372742dde4f58e0165f4c53c6be5897ab3c4e2aed3b4f2a630f05384e8157c7ae561031b5959de1cbad7cf09ffce39b79509802bec765ef5b3e31ca3849ab4cd0dfa00a3a0b4969349c80b5f567127487d8b0660b4a531fd2af30aa26260e2bec9d21a7a464106cfadd73b14021e461417ec99efb9cd4ef256b18433e54d96ea872088be30031496258ea660041675f8bbec7f4d88febd6b5753044f20dacad95e401f517aff4a24cf209dd2e171f058ffaeb99e9101170ed2dd5a5eeba55ae39d3fe8064ebea262a9b97aa7d0de302cad5b84b6f9aa1b68158932f6d2c22cee2f849267bb2cf213c107ce99972a1911c1a5b051f30228a964a449603964f814036723b88524dab15826f79078906e6324d227f3cd1325be91da637646b1e5165a2dc7a1659f8b30b3dbee2d417c5ecbbf438bfc2ab473c721bcedc8617d8c094fe3d3c3fa86b06056ea5f5ba29016ac25ee7aa97d9c45881d59fbf6c42f1bc7c2192ce1f013732300944fd7471f552c716d01c6e5885a92281aa48fcae846e986b58610cbba0adb2a2c6859ab4b7562adefa0eacf1147af2efdc28ce9e4d4e0c7b61ae719f94d21f8ae954256d370b13f142e33dda461025fa21502bba4bdd8dfba19713f6d30f59c50127e18e082d1939ed79d14d127023a82ef75e292060ed6c14394d544740eb2bb8dbfe4da075f5947781f47800b2f75fa87db44027193bf7e2b7bd616043a6aff218f314779a5cb81539710815a950d4aebb4801a227741fcafe2d816f8c3102a71f1592ceaea1e0529e89ac7423d5adc73d8a9fe2f8ddea3ab326859b8226aff3c62e977c28a5993dfdbb237becf8438e23c0e9bc05dd378bc5faf0b9dbb8ce28667dc4da9df9ca4b800a6f2c9b423641937ecb39a4a9d84785b20c3a51579f724945e04afebf306d172971aafbece46cfece45018736cae93519f65b481773a4d28fe112d427bde16cf8c5be66825034107a349824a93499e7474d4640ee98ac62710f1c8d5b57999d33e7e6f87b7208407bbafcb09f21c4d079029674c80d0e2f39c588ab1eb561edc8392bfdb51b70abe8abfa365af78a8f30e91a4da4003cd795d345b6142690cbc7ced14d707c55e1d248c2b16d5b3879245c2e3f45e87f2bc82c046dfd2d6b3999cf38f0e5c532144b3760b0a6ea07ce5158ceb133519661b02c31ff1239ef5ae4490f31cd6b45f65d7d19bc5e17d4c65009cf906e67bd96acc6d0ee5200b6c7962c4ba2f95e068dd19c153f3fd54561d9022337d8011815aa44b7f498e1970930f948ac6c267bf8611ea0defdfd6dacc09478b06a92da2727d0b0c3e61d75810ede4edc1db6321014809e8ff5b98b016e2ff8ef6fc74da98170865f3a7a51083fcdf769c198e1a810f67eb4f9b4b9a658d395999a41f7a3f43311278f9a3070c18d7d8963a536ddcd861495268063d28c5c0c4a879f70951820b70e289cdac76976f93f48c5506891e1b1d3b2b6bda3c29ade2a1272a3366fdb76dcea5ad409c736cc52e874e0dd54e88cf2d91166b98ee00f231b9da03061ecac56451e6cfb47520ac610dbcf4aa2d7456df6f5f0adbb448cb6e1bf83390ad4187674735146ce65b220651c2206974fee450e1cfee44905b87da5a07d1b1f9f4ab71f9274ce0e4e26f8e5a03fa3b58d3c95dfd395be8fcb7b6e5b5feda990dd4646773afa28d03b8171f4f8b59a10e4d58dfa71e26f71c7b906ab451cb0d5ee1e975a403995719aca3b25b05acc
74bb142f3882ff0314791e4e0833e226885af53de567cce21703cf827d0740c6e9f5bb47f178cf49661cd594fc0877c6bfe05a3b5208ed5c974bc28f9bbbafeac9b1446143c2832215b0b7e7d8f427c277ad6e0d2b4c53f18ba8935a9c1bb21ef16c8b16fdaf2b4d0c5dab9460f6a319a3dd94aaa73bff2ef2b9ddc53f8c4989dc5c22074dcfad9ce461a0afc64db2651ee78e5a9486c1b87c8c91fd3236a15682cd8686d1aded75a2fca5f808e448eb329bbcec72795698b381f69e43d7b4c9528050f1a12761c527ddff5a4a7b02efd0e78c4fcecf2d79079efd48d9a1554a27a99aa45d4c004a329fccefd8b18e6261e1d3288db138799cbd6ab265b1d3ee4ceb2e5d6c9fa2139894d1a33c3ec605102cafad11d08b30fc5d268b75842c43442938f23b52148c7790b43914572e689799b9a16467fadd27b5f9f24b2e188c475d7050f5dd0a5b8c8f3a3256e864315b4f117edb94cac3ef0c1a7980b99c38b9f56268c2e4f2acc421422e9c94bea75102931ab02e20eddb89e60e3a96d471a0b772bb2a0b0d8b8bff046465af2162aa936a4ba328971713dea1e87517b54446d0bb4010522a83535ab7a36fd66d1131f8a971e0a5b6b4ac6e4f8d6ce8c29dd4bf8e96fed832e578c3a5113be40816b5ffd6960f0e22a81831a30f351ac6a4c616558244bf7ce9bd9455df085ddb601e3f1f0f18ae92cfb0e4c0310cfbb726b14f26e2458c95817f553a1052e2cdf28d97a24ef0e16db161bf8db6447be27c924dc18f81a68e1bfc78cb81ea622ebc218b6fb02d357ae4c8f801a832caf13e4d6c26b41247d76ba5e1bca53867dfee7b65f4ca912f737bf556ca60e2b0b5d9ed6f7f4901da97de3d0e5b23ac62df2592ae3347cf8ec60ab093a3bf35dc5ba89d0d4985937f78870ab01ee316b15c9351fbf3c4c41767a5d64e8e9f20149aea13eef5ae0d274aacc8cac579dc9cda2df2b5dde68c6343b62ce6b554bf21f45e7ebd0fa7200cf3c2125803af841002657a696935d1fd07259f5588e59e856de572d7062466bdaa070ab8e16a5de87fe58c5470e9090577f8754f918716aab45d2da5c5f0bc052434cc55ddc81655e7eac26e3b995b9c24480375f37de807ab1f72de88447552a6958e6019d732466ab2641f9d8f5eaf1f96b9f9c0a52340ac5566bb2fccfc2e80ad8494e8da59c12c52202195b4654bc17329109321560b562bd9a9c3616933430c10c6fa079fa9b6c5059c1d90198c0b943b6399d0a7d9a6244ecba16e99e31b3e9a64ccb8176345f24156aeaeed8d5d9f3c04a36a122f3da3f0c14be9a2ce72f3351180e4888546f53a256dd042d7eb3ab9e243463379d59323551d9fcb7e1007d76e1451e87dc9466012d7bfd540f9ce4cf8663d455881a21dec249440df889224a3b9ed95bfe71879dd23aa4cfdd5bee5aca05681132dc5b54795803b3489ca532df3005d83bbe556d616a69672bad1a31be349b1d8f2bba950053a718e819c0ba9872f62f11d611f4974985298fabfc632704ff2f9ea31c8b9a1e24fda98dd2a0fd9c903fea1cb9585ca487b8efd319244c5f4d610e7dea24603da40facc40c86420e60424fee4c37bf8b342d7e5a4cb9d7d3f8382264813c72c9e36a8a7070c07e58432d595c7afbe7bcea414c8e3369c0cff8209a2935e680ac0f6c4f90281f9a9dbd2f11db908e48cdd18164a684a007263953d5e3365ba6e6eabdab628491406f0a8129f6742579cec2e9188c09b7b2e4a2e43266654aa976b3f4dc8047f6bdc7ceea2ebd39e11b1e9aca8845fe0b243da0230aafb066e8b30d297a99d88ab267df35c40207a932e21e47eebb6b10881b22252e139a3657d36b38935cc29a6ca6759b43e6c241286d63778b320f2948e25b4cfd79039256dda56fc9f0577a89cce3872df778638a2abe8c6d30e2cbb96ef73b75bce705a76864ea0c2c9866f0ddc4abdfaaa4ddedf5e766443d0b6bbe209edb04370c24cbf6d05e96c7305e475b085ee348a67b0578a4e563e6101ccad08c4de2bfb82df8b77fc25c14c781e7665c0af5cc2b4f41c700bb8382312141b00e7281cf663cc46f6105b225a2e4508110c4b0364d0834a88fbb66ef18ce2e4adc94e37ed33e3c636cf4ad02324683445ad733667bd025985f99ced050822d5eeac2c46dc94e9fa54b71f9bba706097cb5f4925e421743ece08bdd16280ccf159f7c8ef64759832faa0f486c324c6596a30ec99424b7b255377ac5782c58fe310bfda575b5949a7c7b5bf0bd8c4ab8224e49ce8506e5cd0b9d82bba85b117fe1ee1653794bf1bb9dde07191273b1fbcf4722f9db1a586010cf9087166e469b8bfb43c79a4fa521851edf7014f793dea44fa0b1162ba37e4110600a833a5763367a9701ffd430992a665e6336870da96b10765aac0814c270605fa5653913d134d76a78efe09b7ce5c6bd8f9008732fb1f24e50ce0715b8088cdf500edf8ac848b943294162bfc228130a3bd7af9716617835f15e7aca
eb26a4b972d06bf3067b74e4f0695cfc8c25049f39f6efeb2fea7dfc9e54fc121c6136a0187e6bae3c32c09bd7d8b9e90e3eaa9bfe61a7786bc9a6b963032bb1184608c29b4213f531087ae5652edece3e664a040b1d7eea10cd2b2abadf56d0b32536f3a1fec8b078584548e98efbffa8da35181c94330d795f17a2286b227ee3d9d5ccb27422fbdd1df475469401723bdb4962fd49a0cafd01173f2a89db680284fcaa018f3e408639f14d19c4a8827123a0ea6051fdb41446bda89e7a536e5b57a6c39f5602407c90a6863341a1bf8d376a6c8c4ddc39e1a5d0b7151dbdde81645c0afa9afbc693ce9d06856ce2c31c985713a6906d16d5d0a79c9bcfcc7828770d76f05be47a67a2bf6af5b"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f000010a040)={0x0, ""/256, 0x0}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f000010a240)={0x0, ""/256, 0x0, 0x0}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f000010a440)={0x0, 0x0, "7398bdfbe4bd0ff04668df018d45baf9c38fb9fef88742de6760e9720bf5904d22d3e5ce7a73b963fdc8966023e4490f22308dba6a87fb4a5ae8dd91e28e4c08892702cbe196b5bb3c763561eeb19a2aefbf90ecca4e02e65e47de0fd579dc9f662d473cfc220aa759e1ea66c92bb637da5f26d1111da6c1efc395ecfe60307fe0fc3c390615daa01d05cd7e182ea8467b5ece9984154c2df63a033dc2fe22eda7ae37f4e2ca10051142cf86196e0f7c05d5077b99c600055a28ed570bd77698de578191a070dde5ee3e2ba9e66075e438830dcff4a91b725748a3a601839ee47e39d7cadbdc31a49b8fe5cd707a5462af4e89030c234d4b306a49825d92aae4", "e5fe8a3011c7ec93ebd8620b09f12ae70d02798dd7b296b2d3a0fb436cf76b6f82e4474864a57bcaebf9f1b073114fdca24b06124947e77255473d5bcfccbc9c23ea9c420dcfbb74064603cda01911d5581dcf8399a2a1177512c59f3b37c1713b9abfbb0d19b1b7192fc204e425eb82d6d4ebe10434fc326b2f0d5c8ecdccda4857c44594fe8d1f61144090ed51bf4491a3472cfe5ed19bfc53b33a58178dd1c14222cccd564604a455b5e32b5046c208912d5338184c51758b394a50c071af90623ffde5bbd495264c6bfb7e62e8c0e5c6ba8cd5b964e4f46dc32314fe968e7e21cf8704889d09524fb13f0820680a3f21aa09626d21001f37764cc2bfbac852c008eacfd2a042b9881bda515f8f69674197516837767ca0d07e8742a01991921fbd02790f819f35e9527f5c1fdd3a6b422d624f7b7deb5848b01c7368e1c8e9c49a14fc6226888a06ba3bb70452a43fa84eb1c4552366d0531f8d4138d70dba162dbfc90282a3a4baccbf63cbf0c06ac3dd9e236d0324c20ebf0f26b70ab9fc4a8e4fdfcdca5c5550791c7ebe5a8644cf530fa07ddc1f01f061491915538b26275803687493328aadeddd2a17a87a9c1044092a153456da1d325449599882bf2b9bd4ac1902b21aa1427024b0e642c1ae0900eda851c193867cebdc7b9f22c7faa05ba9db86e3fc22160a9f72cc3013946efd7d3e8bdacb51729ee6b6575dec0a31b01aa53da71a8e46f25198c7356119c00e44f8bff84e90feb4b8a9e1210ad71e7c40a6632063004662c0c5ade8853b3b305ac1b327a1b88736fe96f7a7164874d2fedf604a5bd29d12ccc1f1e5d40c11e54566aca9e51db09f9f2bbc38a3cad95f79b0e90f3f325f287e6b20e40f33a3047260b55543a0fd39b6ce3a73a5b20c41449e38ee1eb4be32207694ac05025dd896c73f1fa493ac49e93a5a7e70e01260de5bf1e36505bd01622aebaca9f6abf2a50a8b09454f4d2915ec89e4145ccb5222dccc4e859c21167934db6e57aa0c9f08a84c6ee228fdd03df18b2f9a86f39eff7ac8b2d5d4166a3c9cbf30c50e269eba7a778237af5d6f222c448e7235613b53c3432f2f80349eee2d7960ee195e69c866f7b33354d896b3c3485798a116a5ca198ee644b98c7d16a169c6e7c463f435a58a578e27245a6156542c0a55c23fb8f8ca067752f78f5766a037c8c151d7401fdf8082d11f5fd2878944a5ddf6e1b8e6a9653101f802fe2f9523793a1b4197ef2d89dfbebaa1c998fcb204cd361c14f300ea3f951f5a589309a5b6d3155bbcc18a9037ced1329b023e7e719f17593bbd4fc9255ecf644d3bdf9a26be92d6b601c2c1368493ff3a3937a5c2d6b675f428ad30c3120fb0f506adf4a8f1faefbf25018f5d44ab48ce84eed7b9e5faec6cd305a2be5b447b8e9877cfffed95941bdb99c9a2361e307f9027e6581e128f134dad0130258d8886cba7e09eb73e8c5187191d4d61d47ef786b0388c89ee0a83cb19125475c4fd4799050a05a1a1044e1bda226dc0a469f8b6c331c188200ccfbb888779a432c3cd2e8667c17953407e44d2955
8eaf1de46aec38d41fa726b7d7dc6fa0ec8c371f89abceb5d65ec3bda78def7f630b5b0abe0519ffb97b75be7fa10908a25217a67768182ee3ffd91ddc81ba1a2c7cc1ef04f3b20ed2aa6133a1d0ce563d4e19ef38de7138a83855cbfe86634e7d6fd6e8f09748264ff1e0b879f1101f495ad126a5a1e47d966db29d5a97ef1d1c14bc57f2f7953a6b4333f8ef8141f431b77ccbe6330f4da16be90b3acf2a9356ba52eb5e916e8b0114d74ef0dbfffcf6037815b6a3dd0dc0310b8999f56f2517d4ea37510afbb3c6a0a60681dd69eb96e784eb2d3f507e39829f6b3cf39fc3a514602eed2c99f90f940a17f16ceb0f7832be105b8e6893fe727da87473ebf37d71355f011bcf99c22b4a603c17401158d88df0bbf0fb1a6a0643bd27431fa4ee75e773441f1548c740bb654685e22b183adcc5215a738645f8cec0072519eb8ae8082d1bba38bd8b17fb689a8cf93f499562384918704e13f02e6321a9d8eb68334b4257dd18df181643d43694d9735dfd8fc9e252dedbd8040b6fbb428f0bfaf3676aa670d22151083f01dd694bd87f11ba45988d53026bb10ef0118894bee144e12cefde1a293f16ff94d0276180705cff26a94833ca742e80476b1cef583625f72bb9310ffa7927805e047d01a05602e44a9220457ad3759c98569ad33e365b646d2a1aaaeeddd8496cab871f8587fcdd59b66a2c98d268d5c7d19e070e615b4ab722d9e3fa752773b2834496ee5c5609a2097f25bb1dbae062e1cdfc163ea0b8066bbf79b3b94aae706ec86856f13e5a6113a4bf6f62e785ad22f8bcbe355eca682154668c1846c5ca889f9cb42ea724db0582364052bf1fdfa5c3d277de5789dd2471ec9fe54aa7f63f915ac567af42d4ee1692ff3ddbd3fb5216044c8eb9083ddefc4298574db28b00b45b216e02a9ded2beafb6212a161b838773684530f0cf495a8c35bcccf4b0d0efdec20cfb2419fb6106b25194e2fe1b4375def05735ffcde17e83cc64e6fa387b3c38a3175caccd9dae96683bb4eaf5e38a1db5026ad732375aa0ce077e8c29a89f04f5a3175aa039585c5ab6e3842bd61a763fdad82beb14e52dbda9b7c693c18b0d11e970f16affe7a42b80a11da7f279aeb3368d5aad0532fc6bb3b162404bb992fdc39be535cc3db34648b5db73fe8c0d7b616d479dadadcef36cff871a0df824f0f37f15e699e8650e85ff3a3e74931a35640625f6c51c086b9d2f5be6f5d53b506c5835ce2f9c569638f7df494f36494b5f1296e3f36f5a4ff9411a8134411c776b7e298842829881161dc9a02a9fe619662fb5990b7c3ade1c92b0988cc6cd0db77939d7b9a8a025db30265ab9db4705484d7bf714301ed8a5691982f4b6d3dc802128429ccd4687d7c4a24cc787272c1a81f32d9fcf0f9e973d825431cbf92f6f5803549539c661cd078004be577059d6c00728ba6297734a5bc3ab8d569ac32ef171742eef595c3df112e38319c12319052a226aea50dbbbeeb58aa992cfb1a7712f7af2801672085c180d1379efee6230c5ffe0ab0b8b9fa561629374040a48bb13a1ba367037c934d115111c69803baa91efca846d8c7238828ec552ef703f160fa51c8039b34c000129d31c13dc0862b3f547d345a144a95697a1a6378795c0263b9362c241d59674133df2767b41c3af8dfede9aef46e0a6694ceece9d3e412537df4bcef9edec48746af4951487849dbbc4499dfaddd17fb7b57f42f6481d2b928a136dfaac90bb3f9741458cd4bdf024952514944a52d556a7f8e539c5f954b1dd55e039b4b2c119d79a013a203e916de0daf693567dd86fe380546b8df0cb683f7cb42c56487707216c56f2091ebf52e2a2abbe4338399e1fbbccd88fc5d2c7c0f1528c90a6d4a914c757d22c7eb366d5a34e2de08475b09c286f9305bcd0ff583d05c26f13d1377d19a3eed274a63886e7f429ef9fc892902e3e42f12d0581c77ebffe2d9fe07508741625fd89c304e808ebed74be404cd36619d777fda88022fb298e57101d5523bf1198e4ed5dd3d962663f315cb0720b62e369ea57aa396653dd20de811f1afc671432936437fa53e7a76c1949d33e59a655017ccd52bd481adfc6e2bd1acefdfeca9218def12ef1e453673bc711ff70b3c8a10b0e4642a72da63aa20402fff68e125b698b92502e08240ccc754b4dd094a7c8a35a164b0690f0c1c37cd8823b2a85aca909f7dfa85e5569857fdb7bce0548d71331615c10293a7382e585011546e7e76a72f37286d09ff9ca12fb16650df6c72a3fd358a09c050b35608934180c8246531ce58b51efb0501e22cf6e8e17dcd2d053e8d517e034221b02834bbc615147581b4f707c285a7bece31a9a17c835bda72d04b594fe7ae4399ccb53c6c3547c083dfd893b2e86b933b872b0d937b96a54088f081a499a3f640261d19e64e31f148c4774bdd3d05f69511fd46debd753489319448a8daaf36c1a63b64a7d725d7430b2aa908dbc14604954cc652576606
23f1a9491b42628fdafda5fe1f73f0a74aa8c4511420cfd1b587d4feec51b409c8eb371abfcf211f904f9b64275b93f4d3115da94948bde9333348e017e20ddb9d49c71e31648f1fa40bf72bd03821d950355f67134bd74d6584c2cfa0a70ade0bf34c0c8770a44ac8b3cd2b0763816f5bb9db423973f6b791dd07cb47c7800372e752508161c16988529f0cae509e5f19fa6cb87b3ae876a4490a3078949d11d3ad31071ceb4eb12b18c3f516faa522c6a93da58d31c5b78bf4d578c36566a38d93d45c630e0e5dd04ffffca7d169bdefa3115f00e6389cf3ca501ffdcb7e44c2a0e1ef37dd20def49d618588401d4ed1ba7f15d667b2fb1ac67472c5454fa5b1cb0d595ba1c13348d9e887b86ce6b4db012e406f7f36775be21e5b2c56129622ee3f298ee72eb8e3a6379a6f2f7aeb4c489175953590b5bcc43435c978725fcaffe6de5745b7e9badf2228ccc1039acbaf38e66467d84b91c30302685b49d1beb45fd14277f6f88d95832ab0c65da29731f52d8fc83c6b9e7c91d67583eeb710c06a4552468f2d841a01b974a19210a2f0e2d735d7fa18663f0f7f603bc2d1752ebc6f4b237216cef45b5e9ef70a45400ad69e101b87f0e1dd7e13d4826c4920fb334766dde41cbf1e355640e7cb9772e649f3576810f189c637e17a75ff6efb7df590624308e9c5288516341141fff9b9a8a474de9bee109e3ede08e31b8f1e1c13cefc85ab4bc7d002e0a70d05d2848a5ec042c734fcbb73e67b034e12273ccc82d3da1275779e78fc04aa5e475a5b99391c6794ddc4c9e1ed1557e564041ce1903a50446c525cf728dc0def4fb66af4a8efb9198cbb7e06a6758033c7278c75f85429c6f7b34937c122765802947a820674f21248ac7f6193707c65cf268ecf4bb5fdb0e7cdba39e1b9320877c1c86d69b48d102fc261cdfa199b817d2f5a6a0f133bc5f79e245d6bdfb882141307d496d0ba2270d89b91c4fc06a9d515388c165e0fb4654fa3af08857756f1c803fb3e6dba5c9475fac7ea7c1dc0eb3d7ec437ca5418d1984428d3e63219d41869673571ca4bb9ac32cf58feaad12314ad2bc0c48fc290c607d39455d8ae06e1395cc2a100c23c363f9c48c5460db0936558e7c1cf5024044cbb708e4d6c0c01cb4e2b83b912af6941ee97f9d028a209fb3d8971f98872d2612b667555743335bf9bf667a1023f8718d06aee215c940181784e04907a4cd495668e5ee267e7a328bab668d3603e52956793ed7988ef2482b9ab955b20cb2c52b6fc039ff6501c6f1f44e0eb8d66a391ef6faa31a56841c1ae4cc1f944"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f000010b440)={0x6, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}, {}, {0x0, 0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x6, "82deff566589ae"}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f000010c440)={0x0, 0x0, 
"1552bc412b815dd616f0cad39826e51e7649c57e6940c443f1046f627650f28541e1df6122e926c0bcebb5b16dd0750635a55033ef80741ef13d1511534d559da9b5e477033daf65836f2ea26f6f3b8e13865073693d72fec098dfbfb5c1b1eb842e7575f6d74fef8edd98081bf36667c83271849d914f954763b58a8916cdeb475b96a91fcdfda5fb89c6fc15dee20e9acde0eaeb27e86a4c037e9f07559b503e1fbc7533bb3767eeace20c25895c8d387c2b8db17fd322d037e7a50136608757c20a142e9cb46c0e578d98dbaa003094e9a5194dcf02232d306ec455b4ddaec6d5d7a2aa233b17587e3e6f823f32770f309a9640050cf6c4708e5cfd034ed0", "2fe5bc161c4b3d4882271128cb77decce384d9d09faea517f814376b3eba0456242087879be5158df0ec6fb5722785b1795ce48637e4b4b5647ff7b59d172117ebcac00ac9964aca702cbe6931ab95b5656b7726fafe6100da4b1a48700fd1088df5f69d0ea9da08f7ee74228ed1496e61c86ed4508f8b602f200cbe01d334d0705488509f3dd7246a459052a09f7c0baddc477296cb4256a746dc00c118a899f576acbd8ffdd4135fac695c0199313f02ad8f7438c47751fa49cb677ac979bf5be05d63b843c421bb9aa811ef744b02a2fea5b39abee37ab4fcfe10aeaa9a3089b6f70a5efda3a7f120c845fa5eb10ec10be3b3fb242338c1de920b787d16ffccffd4f88f2af7324d81230b0ca6ee6f8f18d9375ca660fa6dff188567c6ee725ef2f85680e44766cbf155d87a4678a00c5bf21056dcf60515aef850554d96c228f8a189417f0adc6be1dd46fae2bbd15c6e5d1fc692ea5da0410cca7f8467d7e0e7c84d3ceac93499013139bebad54f71cdb9398f12183c7f48e0f9636fb0ab904aef2f903c453dcd009613f3a62468c0f0c9a742dc53e878b79a2dfb5684f40fea8cacf606f9e4c516816b1347681b0233e8ab7e28dd94c8205e54e12f0ea736dafc7e02155c7f6deeaf322fad3bfd5ced30f3ab1bf84eabd26793584a0af4226ffc4aaf2385e08f92df0dab7b8a851cb09e6157627b43af46bc1884191b26417c434ba2a706305e4b3b2044f5a9a4b04bd3f7c8c7129c3484e2ab63ceb0039e2bcc4645b334f8d51800481a516b08c02df51b84b0530cbc7fce049f501d1b4b30960969d41b658754e16f6086e82c022d04974eb10d66da6f30ad94a67bebb61884796b03da6c85b169d8425d514b5bb9b767fce202c149d06c6ff4c210f96ff955551fa4a00928effd5321e9212a0774d0271345f2a9bdc5b5342a14bd0f3280fb923d8da7c3143621ccd8910be95f861086ab5706d430e05ff17ae54ff611f9d1f7545831d4823d43ec252a9d6f6fa4ae4c0e8e768ca07aacb006637dab52e53dcf31824063943b240733d556b8d169e19316f2792c266c93a026531580fbc96e2e5e2c75dbef18a3beb1b6591ed3d1f07b0f19f9017929b2b6eabdb404850eaf8b3a8ce8cd36dc95d120aed1520243df6604292de9f5d429aef2b582f5bfb73853192858452d85c5c963505edf262fe4922e8c0ddc441a605aec7685b42cd62594c245e45be8fe7a3c5482160a03904fe120cbfd486c9d37d19e30cfb03d5410384c4449cdb39fa9e6b56c631c54620aab3215aab6b8203f52a0b7ab4923a9fe12a61d215d8a9a0bb017e6e61a7d90a0c12427ea47042101a1514068ebd127d44d71758ca0d562191eab5034c1c4cb32600a6d94b01c6337a75bd6d62eba5fd33f8953cb438d378da0562acc3e9c1b8c80ac846ab81a2f68205eb7c966a25e16d7990fd73e43a7d7a7e88f6f17b4e474ec563cc03efc4d6d84eec05d90f8d050f3906c9966ef295ce16325ba11f2b5025ba732db94a09e70ed1c2aa3f3c2fe8e8e898a0abd7cd4e2d42a7d3a9ea65d1990b5946b078bea9d49e37e77a05c6a7a863a34bc6b277f3ba44f961b36ff00ceee5bd4e119dfcaa9de0b0486b436cfd6959455c4d3606ce6522309398fde327094e89f50054d81a34836d387af4deb6c73804e7b320a99909fd988e4ae7628414cc4b384fbcdba47a05793d19d3302090cc5ff9470f96ef943fea28bc3a5898fd02f7336d67142d10de1bb49a4bb19205205aa77bca019b658917c01dea28a86e7847bfc8809436cb8b41439febe98adca0d53487fef3cfb41b269a128edb3e5c22fa0e4081fa299528c3885811085dbc8458810427c3f3a93af8c30cd34a61c5e8106f839e19bda6245e0d10df6308434dde79670cfc5a6a4c373e2755da491a48103b630290f7508283ffd3667148c78bc2b6f5b202fa94fff3c9d4a714de589a034363694da352561039aa145af8b087b44a218f6759cc98cdc5788f71536dbbec23eb9a78a1bd907a5fff847168689bc40e57e8a124b069995b2aede317e9a008d2ac04c6af37d165e3d11849cbc0391a80f6df948f79c236d9ed42d83e42ad7c247000d30409774ce52c917b6c1dc147841ea751a28877a44d8
9e07c5c45368af612fdde06c57103dc62487a09d2c05a5d9872214f3839f9a2fb78e5f5ad2f2f79b4da0f1dd4d811a557250c1005637bc40ba6a38c044a2c1fa494c43e5e536d9392da5b3e7956a85baea2d08e5a6b14d4a86716f3c318c111f6d2882b030b8489fb19f38e5045b8cd985d3747c2bb45841f9d801b65d979757b21adc01e3b47c696ca88db397105dcd961ac14076f6399b2560f0bc994a1c148c57470a333b56d842b76663b8f430cf1a8785a4c065c87fbb8d8a5e45564244a55660a73b1d28487069285c2e8510350f1859b8b9cfbdf85240870f431332953d11528a17d873c2ef38a32602a1a921d5a4041878809c96a86cf38263a1ae5d30fc874aa97c657e8f6c18635f5a02f4268a733945a1e4e16bc96d8046e0f5eb3c4dc8ec61e089a1a6416ab44b17703863ba986c1fd2866795bb9a34dcef0ddefbfce09230ca6e92b2fc3b45a100ec66b5626d96eb90ad96d9475c0b79126b635d75b3ef6a21aed553c9dd27f046c510de06d64cbd8400c78fed9b79986dfdb5c639b837abcb9960609b725343961d186737f6a5466bc4bfe6ace2216224402b013d371fc27dee737cc13a5301791c2a0abddb7be81ad1c7a7075d0b262cd7a62dbe182a25c31bf9a8ca029286845478acb9e3d863678d83699f12785c6b225ba65cfee358ed05f38d2d75ad7146aed4dbec9d5b055b7b8cb864d46db2dff83119eadbe0d73abf553e3288cd8a08a0a7a547821007ea61e409a2b71ffdf40db0d257090d39d2cca42ed6bdea97a51d0670d809cc7e0aeff79bee3906d8b8b54465e69ec343e21a8675c5fb31a9aa42d6f5b1bdbca7a80b5907cd25c3c0e6d98b6d7a27c0fe9af2dc74bab4b41cb6d890ebfd12dff410fa35f768bd61c68b757d9f3d60a93fb4204e6880cbe8fbb2c37006883a0f6e670c53ca995cb6f604906665ab1ddcdb1fa4f2659993788be7de2a580a22ded10fc875e4ebfd6bdf13998a528d6d0c3430ba8267e0ed6267b3055d79a13a44de7e0515d2a981602a1edb069388bac68ce0d1f92bf753aa88ba38b876679d75d991882b537ee90ef88d5b3a2b22f7d0975c34be431fc78d61e3ae13b89f8f9b459fba7ae2c891cd23cc4c46fe05eebe2e2f32d14d31fee5a40d2a1cc92b10c2ddf6ca1ee4b6e500fdca3a5a17c36220a4f1ccde2f3f143ba781d98ae86aff1643e078c2f369485fd3570872d27dfb67380d269ac52030851dc2daff15e0b46c61e27c5da6e23afa9411a215ddb3ea73bf7eb2a077621d087837c3d93738a97f7bc51cb50902b5a7d80890be6ce61b6ad9ddfabe5006dffeb7e26a46c9d241086d89b67d39c2a50d8bf64b3ca00cf98dff7fe51b3ec113eeb1a524779029da2a9035f84a70025985d06468cb9632d8e60f77de66b144fdc4f64737433d892d0131519fb7a50e18b5cec9e63af5ae8760f2c80504ec8ed1eb8814b866f8079edfeb6923dcc6a12aeeb93989dbd1d1c9b55f61996244e55b18517744f8ffe1de5225bcb29c7160aa5b51c768d179c80f68afe2ea584c5a635647ab9e1547b51e687c207d844303acc71aa0b3f375c44ad6510235b6f47b36d26774ffadba558e035199f12f3c7c3f2d7a2b73ad571eeeed3f2d86ab3363ccd19fb21812ede680697daf754e8eb068f5c02e7b29bfd5fbed61954b927a05345e521adf866f2aa2c212e536018b76c99fe4906dd8c1f19240a35f662a37ed76c1c363873639b2ebee9df9b4dc84fb23da7a2722c4b5f0d219deb18360ca3beb553893d46e54d041474d1bcba9ff0dfbf3c987ae9cef5a650820d64304503987a39dadf43f44188c5870ba3de9f0eaf9b0fe229e0af46513d30fa6eb8abcb40391a1bf96037d0c15a83364766c0cc64585362d7ed780baa76b32f6b6de93c40a33168980de39163de8fa370e3ed635695be4a2e6c9383e85b6c4b1dec1a30b910eaac9b36b92055f90760b7c177e86485a9eb4bafdb3a813bc32846b3f613b61e43ba4483dd967e0cbe4094c6b611863ca9bda0d30d55007255a498ea48da3ec0f779ab96493bfec3fcdda583efbc36407b65a8db2036b91dbd0397ffb21bcfbe761bf9083b8755ef3221fbc0c0c72e70bd3657964ea2425ab6c4f9a2b3ea1049b113d0059fc5e704edffb668d637fe3c35ec85141556f3b1b2547da1bafde0cb2d634cd786f3063d392d7547cc6f5461299e9e9944b3da68bd755d67bab03d8ab5623992f34cf8fa40907f05c8d1639d4f50a8a2d3655f2251f12cb28b00a8a3ccd6e70bc61aebbd9bcd1ad5bedab7ad9fcfcc3768cc07630b0ff3cfacca720f2709c09ded1d71d711fe78da463cefa4b4734a5853d5bbabef1106ec1fdd99e9ad2e3adbade91fb3879e520dd9f0c97b2aca475c105e8d08faece6298c5c56faa763aef55e5f4b88445d8ced1a83a6a06b02a8e2f28de953d4103904db93295cc7bb6084707926ee374c168bc7861e97612d46de7a1a7629d21a9caf06fd94487bdeb78674b6
a8ea35ae29d29dba797c5ca8d67c02ff84cd2e42a8de19397de35de3d4fc16bf7324d005b496692797ef143440c8e6901700632f8028c6a3db70a76211f2346362c43858c315a77cedd52d278e74e1f07e86e558d68bb98786c7267ca79dffe789c3f41eabbcb36abfd54c47c0a6017c7b7dcafffd6fe95744e1886f03b078f1819e6eaede92ce5c6581f140ea7fd44e5ab2d170b724a58665f9b0ade3822915304ee23687c7b87435fd4c750bb2d7586fc2d2f2fc3b3a2aa3fad3c2c82aed843d0cac7bc0d38cd2f20534d5d0428936aaa7c35239a72dd3a4815d063962034886e0c49be1b0fd3028c348036b3bcf01a4fa168e000caf8da46251a5b12474f827346a76d142c09d3a4df61cb1b4db7538fe8ede8872bccc7b70e5f83f1fb7f1b371aac3c6988a4f79f95906bf0532cfdd2028680576b228e60dfb1d3c7c504c7f59326ef050ffe51488a70ef850a486ce298931fb770fc7b6aee90d3d1e3ad9056c769bc1acb3b7b3cca3bb90cce4e3993c1871b066aa9425ed48891f479907a9a49b4037c9df3dc87e8017085864535a4f03cc068952fbe71ae42ad3ddbcdd389b4350867ff60923b55a6950b6e7883782300ef902c85127f2d42380c1cdc3060b9ffdc8bb15f1c833ca9ffa30f6f4e7381ca04b7d9e0ee2190ef573cd500b1dbca64c3c8fd40d785904976e9d37cef47601cb78018e9719c5e182df0914deb53c88789bda0d68f8cf2c9901c172f"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f000010d440)={0x8, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x20, "6eb9dca8c6395d"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f000010e440)={0x1, [{}, {}, {0x0, 0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, r8}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}], 0x8, "f5644f809b88de"}) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f000010f440)={0x0, ""/256, 0x0, 0x0}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f000010f640)={0x0, ""/256, 0x0}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f000010f840)={0x0, 0x0, 
"9c393b1302bb70d37a43b75f36641186f5d96cd67f9a8ff67613feae89ec31790794bae1420ca5575f1ff8a1d359949ff78d4f96de66b602064dd14e36f243b5115ecbb2b8ce83ca4f9ee7fb196bb90b39c2932d85d599f5596606f97a078388ec0dfb0d37240b62e9d69699fccc032014df56bc82fa0cda22002b2cadfca180f0e0cfe44d6745eef9b2e05d117a305425007e515728742a0284dec7ffe05acf11a9e5e54a986a8d3e31ab55e93433e87808ac25b7134307c5fec74b296daa8d997b2e77294d26b0bd16a646b2aa5840ad2ddf92f1a37b28b4f4181a8a8d8a6325c3b0b00d8b404e00fa9e325ebc482b4a292ced89b5a5e69cba1034c260d5b1", "3aeeaffaf12b444b924c25680c8ea22e4eb90818f86944861933a4eedc5fd11c861fecf859cba343d241c3f03273b60079f6caa15bcfdc51edab5acc7df5198a75309d8a910b577e79a03748fdbbb843cd27d63ddd4e5948e891f797541b9be817e721a2b625aad4a27b48f63356630e41d8c3db92b2b446cf427a02a41899e4d9f17d22e1a3955c60c9564bb0a18b608650b1f04b2247d81feb8b5ac7810c990ebbb957e05631b204279e7d53d51af29b0eb7cf172d06fab61bf8eec0b10acf208e04f7aeb17e1caf27d065ee6f630d1ec90ebb1d2e32e989cfec7567a7c432d0d34373b13478f51de5e01ad9a523b7950db45fc519da6fc87086f93b80b8b7ffed003d241c921f60b45ee9b428d6533ee85cc22c2dfc84f9b5a6c0978d706b629c751b97f27fc813f349a212ee6077dcc63423b5d57f7ebc60038b347b981d0c0eec55654218f03a57f02ba68b523168aa2516f40f900c42b0bb73e537f6cb468897aab82105b87f4ca27d8c2e887267b2771818c924595d8facc2fdc81144504e7d1a3bb2ab551928d619fa31672fc340a780282e75e392520d0b5d0f6c3d1855d47b3fea0943f18ba597948a153a1fe355aeb0475c2b2c0caa276a60d1272abf1919b8eec49ff7831f18039f07c7511b9bde1aafdcabc9acc95055ce3f485a850ffcb5d00f90fb04ff419b68261589f9486e75361340f5fd40b02c838b09022d78429df6c42f58fda36f72432ddd6e1230080222e4dd48cd5feb822f564e7c8c0e0d8042d34236a40ee66ec56d025d06179ed08b0b9a53a17eacc8d69f108468068323af73b2dc79a92fd55da885a7c2b4d3f21f524db39a2ca1aeff348d1f17a41742385b23596819c802876645854ea08f13fca81fec233b7fdc5d4b0f9154aad0e3bc0cf69b2180df35b4c88b3b1bf0d8ea1703980174f5530e13bcdd6060ca398b80adee83ca9a0ac615533965ef92b2c99933f1d4e822fab32f722d2240860bd35b1cffa67502de7ecd6e13ffed9babcc6c3493e17186c3309c84e1b8cf3d7d81af0b3c751ff779f1e02ec6980b765b1ffca82f7b865ec7c3a70134d1435029c4a7e81767947b938546762617b9fc8f6fa3f17b4b51eb64217c124d6c730adeb9813e940518334d5be63d75188a472b3bb22b5d12231a758e32f73ed6a079c9d24981288033bb5752eb8d341e38c8cc9dfe1f3c226373e4b08d14a772a8ffd8f0843fb2021184e0ca855273757cd9d0443ebc54d80028b15672f3c9e1fb6a378292c5bb1235c54e6d3cd528d5c67760a6633ac8579eacb45bcdd2d5992870551b03083eeff19f9cdf614425878577be491c8df09f3f3d7f42bde199c1fc994ad3ce52b23e7766af283ba1dadf99589fd4f7b57a8f533700a68f12bf617d08d65d77c24ebcb78a80f598cc4722605066acf6939e8f822a54895e1e471b6c87b64629409aba211df351d9e1211827f1017021d2152312bbe5f4566ea0e530d2b47e3443e8ff6a69ee03294364690751248d962c0840cf69881c9b2e98fb2887fcfb4b4226611165b5ddb44881371e0eef00e58a7626c0e3e7dbddf48f62967a66c3622e7839c6d3e528dd6b83baa3b001fbd462878061fd061021d4cfff83895a131881f3f9e7e79776c37550cff8993bc28d2c185c91e28e240d4d5991dc499b101915174200ce811204537a984efb62756d50bbd91c934d9b14ee643c2c9db9f7a43fe658b9084007a80266e8628cac477bee3ed57af6420d734b332f8a6a3c5ce1ee509b0f326db56f1a4dda5156f3ee6110c3c2b10b9994b2762ca8ed99dbe759e51668298df16adc978337ae1e514f791a1482dd1bd123c40604619e32c55d8d20798a25f139b263696e8e81dc9f21b2c8245a852e9a8b870afc34a785d2d499d3a12737485a694b61c7a47283a6f9a47abeadc0d8295ddfe819c48847d396e9fcf10aa154682e51030b485e26ea0ab4dd7ac9242af4613a2bd7f6c5826a833a6867a0b3af58ed4e96ec5a47afc50c3f759317135d75c96e014bf048c26b8443b6ad4b18e93579b2416b925aa8e69a2f56387bb5e57e0f27acc6229727ebe6b42053cd5dfd5ed3ee8864e016fde2f8b4f918e7816a5eb2fb71e36252990f3f0c3701749bc9647fe29c
21f9a7ea119f96a945ee48c48e3a8745b4754aa771f6f69fffb5614deafbd9d7feb220abe3f874498456a6a9cfaf4c755980d1f9f220a12b598ce98d430dc59ee69dba3d07bd1207b0ccc22698c9c522cbeb97573a17e40084dce506233357cf2c5c840e31c967376224528cd1b987927d342c28c9cf393e35dfcbb32a023f000040b689b8f469fe1f04c68748bf9cf2e93c1b13a41f4e9ddde533132e1ea59f94df48e91000af42a836093ccbf41be79214e144ab114f66d7a57cc83b2911f03969ff38c2500ebadc6d2fffc4126e42a3020dc06c49c8a88ffe4e9a0d5b7aaa676ad29922c0483ba1b5cb640d7fa7bd88e76a7110fa79a7f2ffce18e121284b40dcfb8d5b9d6c2593925629ee71cd3c2dfd4342fa2ab4aeec46bccad6928e01dd0371ac1cde47b2791c49afe9ffea3d142a4fd9a388375aa23d5ee6b177b2b35dbb0d7e406e0edf01d4832b9ab758f8ce7008f799e8ab62a3bfff2fdb08e312fc1d418a459c27a7c336e77f9ad5a983781af660f58aae7e9f8bb144f7ae510aca2c537d7842ea4ec013fb9cd6ee0943629e737ce74514e38945f6ff3aab280a816a19bb3557e772f76213cd9eb628a1d33c641bbea9d35169e07f64a51656e38b0498883d4f8b0e13977ea5261a112bb5c1b0d2cfab4c00c57ec2982d1616798b0228f8cb20264371b42943fc488752a12cec3d1c5c28b0dc556424515c169418cf972f7d50700cbf065d44e14f23733c899e66129f531970ac7b8ba3a0b338cf87a62add2a81ec29a12b3bdafd3f5424752a5d92d1ff9b80d619a782d8874e1e2a2831d15781504223a8bc98ace99615b0541cbd4da50f549d9de7ab7c5479f2755b19963092f151eb848e6a624d988846c4b2813c3b275d2584d27a74ada73698c6d95869d433d4453580f34323aa8b9c26459d1a6e44daaaa03ea99b8defe11ddc7b39f94dc4b04c6b8ec8ffcef4f518c253d319576c4edc693700f68555fe584a96e426e03675bc6c511d2da9d417b72d6158778dc1eede4aa8a8661b126b6b4b69c739aa6c058b050b57ca08ffb6a716cd1719bd0328a4c75761a1e71c32e916fd3dcc9ed0e6e59130186c43a8a8f9134240cdb647b23bcf986df33f5cdc3bc261b6215a3087a0ab409e11e3563bdc481273a5d210eb783c389755a8a6bef60f4c1ddb0da0504b3a27262a950f36899b382c4bff7450103b8adec1f72a72103db0ba2a7239a1aac68b5978b9817736b856444bdf7e380eb65b21f8642b9bf9f3e191c8efb444a6d46dd164b2c1d28571696190c37671332be7f8a5f78066981a67b4acfa70185ed53af602945c60ea147664e55cf084af7c4696ac11ca1137a208effbc11ba2365b030daa7afe70f4b83f2193a480c2260064ff6e3874006a35a6a443c54ed5e97a2197b1a97eb0dfe732aedc1c33b2fb8b3fb780d4896acf634b24ff8cb5663b72660d8a43bb589705d4a6111e08b1664937466e50cfac41825191ccdcc9de30e1f5b37f1ef41d8d0e910c03211faf4939d956b48b6e5864d514339ae844c414bb38801ea74ad2ab342ca8458be9f6b4708b833cbeaddfcdb819889c58be7aad541cd9013433b4117650500e43e3e51d366a2c0cb188091e609580c538082064499058604d88a0c55786b290636f8b5d4d574559b3d1ecfab743c1e3e105991af0c493e13c0c30bbb5d8b459bb4e0db64580b74a7c87d1bb04aba7f6d6addc093ee6a9e4459a11d5e924442b231e402608d678ba52b09636ce11a8f5c779c5f7a7f4a82219c31027b0d3c01387a0f8752824bd25a08b9c8d5a212bfba0cc7f1e89543e8575caa22c2336657c6bda92cf72735d93e04213a22de9fa874794c41eb86664f0426786daa3dac81e8ecaa829d8895bce918a6068f6d6cab67026abb45490a45fb441166e31ed2df19e11e98c6c1cda683ad934e8a871f8471672ce88e588f8c7e567a7f30ff9e73fe30afa4a45c210195e116f76732d71ab3ecb84915dd57b0f63ec655231d43a53d624805c9de0e501060b7fb0127461bf31b4bbc6847481db69e95a8733e38ec0583116d4a4ce858b0cae1c6e6c700227935df9d4429ff871ccf3bd891148f786750fbc92645fdf0e0fe09305b77aec34e067c51594da3b2a7b2436089f30a16c6c38acf1de9f9a9fa37f5c2a8e647fbb84a0fecdb01e2651810075b103bf0687ec257cfc652001505086014534133126970d11eed3801dd7925037cce14e7d9e0709bbc582b7d49c6443d2bc80481e8e6c63b3234b703b054d426a489aa5b2e6ea222237f40b1179e91e1079ba55ecbaa7054c5cf27aa61e24937139bc4f83174fc7d817bf2bf7499692a82a45a1c6c308c23adc3ff8412e0e7b9e7e077f3093caed14bd6bb389067ebc503b66646f23989a383c941eff8c4bd8b6b3dcbabaeca828682b5208b0408949b713a1bc80e67fd3df6bf17dd0babfc0c093bfbc7e460a797a303e6faca9fad5121e1e920e2f8f96730622bdbf2c96b0a6e
a8b690b50a36cc4136cda33eefb671fed4c6014827206539cb67281d6abd43a120243be3a099157444e6869a2d852bc795cabb6dd35c82fbae30e0446ea615ceb69793e5445d73419c44b3a6f88288605dd89434418cd0162b92adaee60746a828f415ba52f64d9db470895dc91e9cfc569c6a152b36a40ac8b339186448b4a34af1fd1579539d4bfcf500d866038359e23b7d6cbab14e5907e241c3ab01d6c4b050dbf558db0ec13669bc7d3b96ee49554d236a1ba76a2694e0f133e48942b812894560d348127a4586f863a6680c15f25d402d05e76b04db34bcf96a7313cc70c88fb64f17a1d242e01972ecb2f3633c13068bc118f0ad705ba7f10965f2743f359e5e55310f210e707bec6381660b85fed283d045f324987eeb59dd855ef52d1da216cb488daf76b3cfd8d85c842a682056a5298036699e447d9359696dccec254312becf25b931ac21c14277e66136656ad1fbb0bedc8f30076b694be3125367ff6b07dc5bfe00253ca7e6dbc9ff9e70efcdf462e328afa6d5916dd069ad3846f4e8d782f66a5cc8c84e10d46d07f47e0466964a628a83fec279fd7970417eca056ded566d9684853c1807d83d9ec93239c2cb841d43783a1a1141523b77f3b07b821c3dcad97755f88fc5c590ee5d7144efd1281dd1a1eff8daacdd7b97daea9eb5198378e6f79ab9466284f5b8168d34c04262b565d1d7030a6aa6aae3eb3f2c432aa264c758b8fc1fdcce69b"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000110840)={0x0, ""/256, 0x0, 0x0}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f0000110a40)={0x4, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, r18}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, r12}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r17}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r21}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r10}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r9}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, r20}], 0x0, "546d26e96fb4f1"}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000111a40)={0x0, 0x0, "99203f9d93b192bc53e1cb48e9c7cdc52033758f5eb09ee0933dff9e3b264cffadf67452173b038f488d365108a7415e00c2e81823c1f7eec0d4382e158532d07992a73f02c3d2ddc3b90bc980ce651ae38d96414360ec2a3cf3f3e78f7afd76b40ba3660dbc29c98872fc952e515ebdec566375b5e71fe6fb53990d8c62b488b28b6206833d97741fa82e07f469c04c6f41fea359f32b4b2cb8d13b508eaef484901f4bd936564f76c81717bddac14f8bd9e3b0db0a295e5c291b67f4c13b1f628ea70c79986df8f4295cc276287f457f72ba39f1b19d83c861c3fa5ca65afbb176bb6371a1b01c248dd166e00e39d217b092fbbf19c09a04fa5af9a34c51f5", 
"97ada3195cc544591dbb8b52fdc0d8beb033f168eaae16b0b665eaf0f06ee82effec83903699011d0d102869a28630a356997d7c424fdb49abdb4961b559be9bd6bf511937dda17cb76f7c8703a3839ceaa9322635a09b9eb9d66cf54a8c4981b1f9f51a9a71fb10e85545befc4c37a44e9d9f514750639393504f40122766a0709ae40a60a11118fde0e195fd6017511a56f4cac67b4efd497ff7c47015b8230f0c145a8fd8300fd8567991713fe77c8052fe33e05d1a4d05fc5a23dc77913f03bf8fbe971bcb75ec5b7a3d5500383eac9f58cc97d62c3ff9a70df6707f02b341346e575167829852f77ab8b48d8d6e44fe104af2e20cd61733aba0553d242d5e2b4b71291c7fed8b5913ab059f0f538d7f388627fdec229a4b8379954d47ee1a64fce35fa1a419e98b11432aa247dbf9a52606a13ad12c6ffe81210bc816842a9a1a83b6cac41a12e02ef1dcd045bc6dd86e177e55a317c12803352024e3202f9c3bee79176ce48880a431167e01d8e06297d6aed870994c0ea1446cf2c08abb8aa02f99d586b59b3b92ad40d5107fc43292d2cb35d9402e8313b20f90075753de28d18d8087177e18e511a7fdabfa666ff93e9682cc189e5353f0457de34e8a3d7bc7bb5359b07b575b171032da07f4eadf75baebb5d9024e1002efe0633ec1ea36cceb3f05333463685965b27ea50c3560ba840939bee9e0a56f3d45acd9477a5460c0bbf92493f1f4fb2b2e20d5086854aacd5337e7e14d95330e94461bcc26ed307b5059ae99c2cac9f9070780e0cb027fabc0659e7590e50cf5a15eaa4c9b56422a2e0b370c7e375439bc5b1143445bb488b125bdbd5f3124740c00dd6910f1cadcc2aec1d34c7b00f5f9e294542104269c8fc8224a727094a0ffad01c564f2cd6fb0f67daa747744ca9c9617233906e37f751f3190e49d4706ff70e63e154bc9af6db191afaa0a1656922df20086045e72291e41eeeb0e2f9ef17f5df35c8a93b09293b46ee312503c7d2232eb2a8ec3171fe6ed01ca976e3f76d34ba1d9f54016cb03b2bae4e40e6207317e95b6b4fd48393d4fb22c27d86f1ed58db2218fef92b5181214637217af24c22d2b45ad35bddb08ff21ee15d172bd6e23959827ebc699c824435c8aea3446d2f3bb25f28407a7905356a6b83b48fcbb49b54e2353e62e58b796f0489a99830fa3a85ce548253705226bb81e8d915ee9b93c8fb585d0079eb117d10a37c3835f8e5ca19719c8c9e5480c57c5104e3de7fcf9500463c840a8b2ad5bb69b37e7daa6a6eb6b940740dc657c2b96830ce021c1b23dbcbdae438f3112bed679184cbbddbfad2cc91f538572b9b2e02a3b3ab35c8e3b747d75de0ea7c2931c24aea0ca60bf8b32386c202cdfc30f287e43fd07ddbc8c5a28989ed482299378d84350f5977d33abc075f00b3235cd96f39e6a5e855cbd87427ed051bc68973dcfd51a193cfdab6026725fceb6656853b79492734d6ba70c6988168b83567d0625034dc4630e740de1d67b72cb72535f7acb978594ddf2f1dec40cdac5b6be001eedc7a2c77e1609a85c1fed41973c5f86df465c571efcbca6031f5c99145fb34abbb233f222913a83d908ef80feac0518cb0a9e2cb54ad2126a22777f0bd620bf1856bec8c8fbd0b0242ddf1e429367ab3c96922741fc2e96c31378e9e4b47fd04068f265050ef2456ed3014d9044e34a86192925eeb178098ccd1458bf35b3d6231ece087e9362110d3e0158193228bd235ca18fc4b258a1e8629ed6543a53f21ad9142b17c26fd99db7d06467c6da7fd7b33734adc8fd5b2e2781badd3fe3ed86cc5c5e0c601bf96c9d1140a776c7dcc155780d98873f10b805133c9a77b99035b6c45e78fa6a24896f038d4eeeeb469cd798663c03b424adda2d2caf5661c281fc151b4a92ef3a18025c8df1de976f3dc35554e90d059b5d753102a409530eaba273889ccb3179810c1e7d87293025072ad5c88a4a7c011ca22c33dd8996395f9efe5951a8cfd9bafa75cd25c41269bbd39f7316ec4eb6de7de4760e8b2e002e4c393b83274453cdc835e3a10241112e2a212ff5bb93c34d0d84181e9a7ca6cde5a786c686ca40d2721620af844501130b1ac75982f62a52b5f0afb3cfce5fa5fd02477b2f4760e2c5c81a01eabe60d33c9ac769d4801d1d2ce8e3233d40f3cb9391f6e1da921cafb39a3b6cbd691e273a28ea9a81de91d87a44d3d3c49a767c8bb53898cbc0ab30e4e5d64bc3f5fc8e3c9481ead22202c35df22d20c6c62e96916e159ae8c42a14b90d62f9454a002cdf8271d8d2d56e0a6abec4252fae6d8a56ff5200493378fa2c2c976bf8c1e8a11eefdf2fa935ba900d8f75fa8d4f3473cc8de33d982b537c8e6d9f863c0361d0e533db4b438693e695bf2c8c26e3ff305d9b1d892095abd6cbbe7cb26e120e478aa3573840cd487229614942554dfdc3621c44c0303e6dcc3c8e83c423b347b16b125dd0989834624af122f7cf445e57d1d3e3c3
29321b5732d433431a7a0bf637244444a9c50a735aaeacce81bbfec8c5a1a0d55c19eff11e7979270688c60cf5b880a859e31feac30b59701f948d3cf542a77dfc93ebcfdad25d06fc81bdd21d1b79eaf16da90eceba39119fb510d6a368ab445aacbfba94c259f3fb459ee2cdc51cde9743f2519573eaa2dd33d0cc883e8fd2f4cb410b1caa653aec0b6fc2036498da3f59ec7e0e68c60f2489f69f667d68d99acebe2a949bde63e6d3eb0b0b33c9e7f2bcd92eaaba1cf5e8fe855ee6007d6e3b7d9e964222b10a239620c385b2842a8fbbf32008ce6386067dbdb0bb1340236aba4f5491a7e873737b5527992ba78e3c315b0ff96f6b0c5b5c12e6afc564059f72532fd71717e60a380031f645ead011df11983ba9e2be711ba8572f73a86c89fd93d3a83d4faf1092e929eaf4965b691f66fd76369814a3ad55acb47b7f067299b69100e4bbdeb521a79957bd9b297426e6a0c089862dd45b27bbd56596a932514f6ed6e9bc24d000894d716ad180c477c9eca377e59734ef113273c4a7f670b3f3a5ecd0e0a80cfc5a0af573d810b086a4bc4fef7e2ad56388fa773bfc649beb89d063e4b4890521173334e7683afb9c921c1cc1bba42a14697d7a21cdb1ab1dd0923b0db46240e82b64e157d94f43a98808540747e0b9902177c2d8b2cd5e08b61f4ca0395c47ef1edca09a43e958244b5de406a276c3a7ee21b47d693fba90ade97c98f6a810bd2df8198b11a88b3359e8ef04a43dc90a695ecd13ce0a65bdb6dd57b85c51d07f6233fa87abe939d170bd34f280a4fc0d564c452fb4c69ad6eca28bf9fbbcc7eb5fc1bf114c189fa7fc008223dc8b221378786d3b80c45c8f7e1974f3cda0a8df525fe019aa406f3cc5aa1c82b83edfcfc84eee11402d99b468e7661c9eec1ffafb06ebf36c00d98f9db7b14541003c99394f4c9cf0d4a49b567e8fadf67aab9335a616f026edd144f9588b9df6533f16f7ba9be02cc2e6ba5726df75b5b7eac9f7374396ff64f59d6e660625af919510b5b3a2702ace1b85b5a4b3ba4234eb285224db5bb2f2264b86c48713af5de53e8e84590873ddb8174fdff8c26a8849aeb3f9547e645bfc93be57a4998edfa6951c13f897fa4d252ac99b0aa00fc7d4eacdee10523e5ec658ddbeabbb3e62ce8e301d473aaa15a4892601651d66f41f5135ef77edbdf1f7f5e6e512f6055619dad10e872316ec8bf2d7d4189909e6c422456fbe570c92a46d28bac257b13fbc513781d28985333d030f6ee76c31c8d1c9f8b343856daa4dffb1de1efe31419a74a6af826bfade9a90209e9b6c23f721e333910de8c665fb8a3c160815dbcbd6d24e0a0bac9a583f21b2cf4148c1c654c96368256d60f039b0a6fa9dc50de9a6fbfc6afbea8c7017e8425a37e6cebb8eef1baa267b81da54c042049432e417977322c960d8bf4eddb01be9a93b9db47efd72c26cc1436e7a4a346ba1b467959db32c2d1b222f0f78cd26f96459fbcec8d1f9fd6bd1e73ef7ae331ea35280c857f5618fc6c31f9d667d772a5ee4178652ec6ad1b88a24033aefe60b080793299c0e73e96ce12c7781a5df627d099e161446d2eb9a98cba220e4dac6d8bcd7d3f02d8013828c62714f5b4a7f721ea982be1f8531087d72bf04adc0446068112dd627163de2f84c088139a23cd819baf82df93665c1cf8399869690a3c178f7460b9a5ca0cd8dad23f57aa3b10ad5b683ab28322f24b5ed099404cbcf51fa86a1951d17517a659af9a5a2970c163bb12ce4a202e75e67eb6600b49ff3a078e70a55c73f684717502576ab3ed2b28aad90aacd1938ba86c59092d708a7879f8fddca76b70ea298832a29d17113e1566c525f94ea6fc41a8625ade7332c1f14ea91de7d859c6e95a002c4ac52e7d8e5876cf0aa1a3dea79b646e8ec42ff3d0e42fdc4e071c3da2b68dd1b89cfdfce7290ddd74f08c868d03c290b40fd84fb0be845ab3059cb8394e5f1ccea80e567e784abbdbd1b9165c1eb29ca2c19f63ab0817710ef3dcc6cb39317492baf2561c65c317f0b178d2aa0ea44fd671eb533de70d3d3ff07bd89d7bb0f8b5d6aa93cca5fbc6950091af8d92052901d346609bc1472d5e2aa04b8aae92a0ca472b33c83c2576b8dd05bd7e80fe8f95681b60b487d4ba18a528453584685f621533b60c46f09ee88584d500d7c4bb0b2c247e78d558463f9d825e554de05b245a64aef4865e3a62edfdc7b0389bddad0cc0ed8ee40134d5bffe559518b9e0def426b13edd07881fa28ac18f070f4c206b0e619bdfd1fe013737a15aac793f604d6eb3bdbc3c0fa37a3dd851d9ae3284ff4709335082e91037d23ea97e0b5f6313962ddf7d9b017a52ec59d6ad2dda0ff082337e8b0dba11f4de155855e51a6425678da5237d3647bc1f55791b6cd062f36b72df5f0f83ff2f5836f57186ce9ddc99cbed2f4851aa6eaa1555579dcf365244365149e50e9ea648ca75a4b6faddb9ca784b23ed3925a14b3822b6aedd8c
de8d31b4db1997f81cab35423f090219c935acfc641ab44d7f01353d7f1cc1c87c414b3580a7ba34b2825dbfe3072c55364cc1d9d1101ad9528fddbd25e73097d14f84476eafb01be7f594216fc2e6072dde34a8a1b24b54f435e4d44b78b53ace3e2e4faf82540c0626fc36816e6a001d86371c74da7ecfa4c2e01db3b5a6bffab5d8bcbb88e37b24546750947c3b59963d5c7008767c37b73413d70bc8d9c2a5e5876c889b82e9c429d5a3d6d9689f2e6e109be54a086bc5d385b0bcb463ee94194871c4ffbb4d53f9b21c06ed256d50f0429bed3c84847ba5d54ab8023ace761c5a8a7179fb574716d363b4746c6734a8c2be9bf5c0e057bc94d9384159cc7d1d758062a4fc3cf2780661542"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f0000112a40)={0x7fc0, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r16}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r11}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r15}], 0x0, "7982d933f0dab3"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000113a40)={0x0, ""/256, 0x0, 0x0}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(0xffffffffffffffff, 0xd000943d, &(0x7f0000113c40)={0x2dcb400000, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0}], 0x8, "400269e29b8298"}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f0000114c40)={0x3, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x0, "8a18608fde01f4"}) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000115c40)={0x0, 0x0, "df4c741514078414a26c3ed11f3ec1d9fba039ade73aec48228c4813156cc6183454f0c572c195d6f7a68743fa18124a2925b35b2815d979d6b1647e4f59a3084dd0618bae94af33658cc630b783d629bea28f053d9e2fd6bb9fea2a2cef94f8380c2d74b03dcdb0a3840c7ee884a7a7b9d8462dea41f86e7a8903fedf5dcaeb40a8b93d87323059f50a74fa919d7f97901572967568ec4ebf6dc8274b1a2624806e168fdbbc3c688f777c20be204db8dafe0158c448bf85ec8523c56d2d536b888f76d74202213e1cc65b0e048e8975e1deb9aeb70831275bdb67df0bc1a03bf1fff7f320fc25e1d6a7aaa828b2e96b27665e693cf309fdf2ea25dce1552c67", 
"2611fdf555ce8c2ffbfd4e8f9f2eb69d1d2876ed707aa47684e757c8f50b36a012fd2a8ba9c54109b675e64824ebe5775d96ae080d8eb4917aed7eba4d51033900a34e26860a42071e8db6e6ba52ac822f47441f78eae48d085bd5216247a9a425f9253b2e9e7ff4e14c22d69ad96ef10131705e597493ee01c77c4fdc0010d6451fceb0511ba2c81952151e784710d69c201e9008226477627c09ddb3981980763d645e9ff983639f77f88743b77811933d49621b46b99a99020390fac52a980424797fc1e302e18a4e6761b0a762e9e6306ffd1ab704e2214d8309e451a97394fe6004bc1c7cfd956d098ba1c4744abbed58ce88c4eec38da9d2aeeb8cecabaaad71a4cf80460e6346d59f9460690f5113ecdbc8bf485d48510790a81aeb9f31e7f2f2ca3ffb8e843713471323fc9680a13a5356bee98de1bbe57a19ae0445073c97a39f135b40ab09e3a57e7fc3146c309611692cb382210a388cc607c6c19736cc58c1e429b6ac461daf1c0280568c5fe56d5dbbe6930a4d123a0aefca3464c3c7465500fcd7e919dac77801b36cff5af84887027f201773f61e6b48a89b8530cf51f594e9952ee652339b56271fbe156d3b1a3fe4ad908f0cf1bf50e4edc1d10af9059f5337821daaaa49d30b5882461de7f94be600c0e81952a25960a0711861c9f2242bfe73becd1b9afac43fe8fdebafc32ae5a348b0932d0c0fdb0426772ac05e00410f82aef68a88ea8d456ff36cf69155a30b7c9099402723d7405253342bf88e1b5761d6db7c7186d39a573919194ea6a7db9945d74f39131af0b77cc71c29fe6991b593a450a2800c6e82fa4595958939382101354a266aedea256a26ebdff65cae806f9ad7ba7ca5e4e90d243cd2665bad9d040e12bb52a29353b6eb8f329fc49c08f0963626ef6728ca13ccd4d40a9176c567d428197b3bb40f4e817e52c90a9eadcd5ab17018cad8e3ca50483cda21f1f10407b57a45fa4c18e3509bd99934c5fab265a7cb2e41e61f322f636e998e55ffff6c86ab59a9507751f66e039b130053d506919ab230fcc090a869eb579c8e7a7c9b80b3b37b3b7d4d5b8ba9478e851b4137d7e485216f416fda4507179ada11853ef150b3829269e8722dd993362e5ae3f24ae88a0dbd71c2504ee3fc713ca4e5e0bfce1af96899b83ed636cba33d48a3d17914ae5ed5dd0f4b4fe117c1f63b87428fdc8e2bb61bf39f1515dc989c7ef7a0c9aa8f7a3ffd1145eeab2853a40087ff42f1a61cd6cbdb00ee7ccef5dfe5b14b2236b8e923348e09424d1c2ca557ed6aafe52a356e6462b76be8304b3d7fa80c53a7dc8fa8e4a55febab054c2ba9e65a20b937d9620eaf22122d6590cd370eb323b8fa9b87acfe51ae56c02d3e130ded6f969562a5a9e67985646a70776fd9c7e5885fc6a9ab6f997d7082bc2b744af471d9434eb52d8fe19efdd5eccb75cdb037d7dad33950ea8976ca8ef51e142733f727c972e3404b69dfe55a4f1df8bc3088c45b0c05d886cf6c78c094f616733695a3482a7ed5d28b43730efbe4fa71007aba925ae265846702001c6feceddbd0dd89efb2844d08d0a950222643a53112cb7a6ad39aaa87ea306ab9e8142d1837c8ed82d0cc92313d3d7dc67fb57adb0470c8c5417310e5f54f9524aa8f25bd01853550a8a0a0fcaa3340b6aee239fc3e0d30f299b768a9f74cf521686d3fb5f233bd95c7b4120a913fe107b4cc9a43be149d5e1bc26f33ecf57888132ed146216f765a8eef3d988e834d1a5d352ea944145ffdbd2db976bb4d8ff6e136df4301e060c313d5f72ceeec477b2169f8cefb6589a41b31807ed71e781fc6decf7c822a7cd227eb1da185620ffe54f0d17acffe2568af921570afd77974b04e1c6442dce26eac8356cff4d3088cc1e7cc1c04d047d345ea0e7e3c779a0f9d766195f2b42e35cdd82fc11011498ea28a2e25b3b562ab5ef3f3274dd7c1eafd38c9e323627efcfc3d831cb5c4c4a1e743f820622052fb3f418ee380203aee02249a5b9bd28bc12dda2c4ebb2c1a3961f232f6361633942686afec4c8b4b02ade96a7b8c1b9b5ba9b4ab08bf0d298dbee764df7f9dc338d4c4c658f93c6735abb4549a85afbbb935e79e6f2269c46d68f9df1d6957275f7ec27e47dadba07ac0b4239959293e956c218f70e1a47edb3360deec97a9bdf06bf87b571799f193a6ca5053efb59c63062ae1a290048409edfeec83a03b251859d4ea6e4fdc233d3c67fdbeea6594880e59798ff61442eaec1f28e34acb3ba81758f3f04d6556c4b73532c08ede1396897379f01c394c87db124311cdca8f016f1ec3b837da9f3fadb7b2611cda7ed58e9f3f5c0733321007f5f4f96ce8be8a01d5c352892e845897fdb72eb506697c7a15141a26f32f35d3fd969bd55565e6a3935f9450bb4889254fce02d4c005e518738e7f969cae7246521b7d4606856ff6533b95382ac612a2ebff9ca86d1f930567f604957702e7ea268f580b4e75fc2d46cc
94beb78d6efc8db6e1a29fa8cf001fbe287b1b924e5cf9b162a6891bd84cd5cb86bdd7c5e7487316d01f543f84a57e030d96f9a1a560410e0e678c14b726755dce98098d9737bf450ef21795b307976c78f29cf680736b1141d24a6352b5c8cf35dd860a2320cb632ac3aa8e4e7e0f84e1dc94e0772eb0032e2618822fc809f2510f95591365aa8fc63b499cb6ed8cbf491b47d9f32f7e31815f42432d68a40fef3480a8e25f245937d64e566a0e58a5b82691ba4540631abcfefddaaf6689935091c103cdab3593894f048b33bed3556fa59a4aea13537481b8376b64d90b9adc575786abaa0f363ee0c50282ac7970f159fccbae559a0b61bcfa685ed5934b7624cf66050c85812a2b970fa3d7599487c0d8f0ee602f84edf7d633ce55b00b0ab4c61e3f38c7753d7d42e5c58cf7dd8b94e66483ceac52966ee28d14486cd6fc49fe2e4c5ab5dc93bd2345499ec5e47f375c77f59117dcae661977147685df47549217b73596046b6b11dfcf744438b1d95b613e84feb7fad4809bfa46f9669c7bce2887427e03f3e3c27b8959aaac8356939e4611522e30016062f58ad8fa0f253f84165b2d3d1be6f4ca5286e1330503776a94018707a30211f66ec30148f21e7c56717b7224a89a12936422e07aaf8ade48dedfef466053df06fa51818c8c4a45daf8a1c5b5905c360df9031868a11089688ea09f09a3269e50045870ccf91fde5562ab0cc8c1682577e273eaae726a54053a02a2215780907db33636ede5430962163d2f3259adfdbb4b41e330a1a7633864925344bb4303b1dc18c88d16b55af8066fcd48fa038d76927c3e706c744ce61973ea0de7e1331f738f6a59a337c4ca748347f87f4e0ee9773435ae5a63e6e173b0e6f8aaf587e92ba440cbf5889e42a9b19bcfd64f130441784906fd961c7489c8508c6b7ff77aa1ffb778448ce32bd6055760204897146a8cf750ecab1bbff7816d6e10664a46801a5616108b0e9a8a5f64b1cb4e9160747d47e22587fff384227fa04ac11d425b6e15318945e4f1ae896fb582aaef95f38611cba8d424390976d34615d3a064759e12089d21b86cc7b2fad7dff17d095e5dfd28f28bd92495c6c6c8c7be42d4de6c063b3f336d21b06f952c715b0bc33c878ba693808ef4766ab2b1ea9019619457dbbf87f92e0ee374c992248146865e5688b9e61280084ea17b2f04f56cec4c26c7b5953cdb51f29ff415387ef1d6f6cd24f21a7c27b32ba30834f05581dc3e28404063208126193a262376ecda4c762382cb5fdced22105504313db9dc284dad4f4650fcc569ce3ea4e44a8ec5411867e437afa104d9bcede206c63abfac8af06b3ea47fc3d2f48e85d8da21451abac55207ef6bec4c1f89165f864cea8ba76ee4e45ef3b08636022572840c3ca69c061a98871ce42a0200b8a0b7e30ce520cc8020f0e7593a9436b3ef2d099ef2eb56d1180b148c4c6d4eba9ed9f1e7eccc5fa6f0476fe14b9f07ea6f63da51a7f7bee6b6c61d7c4c019fb6f57f50610515be1318f0d85a86963c1f420d219d1264adc07dcf1161bcaf837d6cb9c3d14bdece2382d8c568e3b45a693c97296c4f5f37d43a1fe941155395c04b95c9fca50d2dbb0a3523de3ef05ad561e2ed11cd1d2258e1e7c659e6b96c7fe166799eb7653fb09881bad5f491de9dfe34ca306ea48b1dbbbb9de3dedcb84c8cc57dae38d183c0876e746b785c03e66154d52337e0bf9ec6a28b603683da8730cd620b71bffa9048908eae95feba58edae99512b04f175e27a7a67394f868f91f17b86dea5537e5f256b47ba305a61d9e84ff77672181c6dcc0ca650bb01a5c45578501e0c416de1a41aaf16abd20086ed6a515c6a1442cb784662656d1fa83430801cad150cf06ffcaa0f2ad67055389f6fe5558c2689bbec141164fc69833516ad8512b177b65d9f887d286fa95a5d5e36cd2fa8dcf7160db2e9fda97a2330fadafd4bbb6672146d8d0af184e9c131a7a18c382c99d34e8bc31754a9a30cbff1fb4fa624f9e2dfdb15d53c5a83fea362ad00fc0dedf897ab01c323950c0da1f4a1b77c956b40b3f796efd57ed1cd850bb754a506f675da039627971c6438d6b7d6b1626020ff02e724b17971a9fdfd1da8ae829441c6f0247db0beeb02ec96a78ac29ed85337c5df3fe43578f46ad920b7bd9f46898a2e002a4403a712602813a8fb8166dd57c1ec8fde9adf41da80e69a920be35f495c4d9066cf9fc23286d2d7f6f8ca4ef9a0bf28cd996893912c416d27b8b330a5788be0d4122b8f110c1bac5406210d4679a3454fa69ab059b13eb4a070698290c1722bd4eb772a48334e46229a8f4fcc3e85eefe890593e9860b4c82f509a11d3832eada0b913adf418209a7a1f63fb1c427e549ad90c9e619e18f0eb454e577b2f03cd185bfd8efb76734f8decb306fd15c13e5777eff49a5cb61f016b369a5609c38ac436b3f1d634e075c61d3c100abfee1d418f2f2c51bc4c8a3cf84b802e6c94
00a6be6f08b6477bb5f14c145da47aaf972429d88012161d7f9662af60ef363c21ec1eec4813057cff08e230611f8a26ca3a1078e74b4fd37e0dc4005a4f94c4dc662756671a7e2f20c520674a7d9ebfb266589f8b0e0202403fae93f31731b27621d1ff72583a8c84ccc0c328fcae3d39a056c8c68f95b1322c61e3e7a13854a960aa84074ae298ed2acb2d66434db214180bd77b0359ea0a28daed11b4485e7536ca8adc81a0727fa9701fa1019e24115b07ce6737eb0685e33c880fb9ed9394a461714fefcc766a2bf2c0a42163a6d64412f1a364dee3463a1bbc2467d8446108ef2a2304380c9acab7d267d13eda72a302c2d71c404db3f88faa6108c6e16367dbbfeb2b716146ec8c96697"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(0xffffffffffffffff, 0x81f8943c, &(0x7f0000116c40)={0x0, ""/256, 0x0, 0x0}) (async) ioctl$BTRFS_IOC_INO_LOOKUP_USER(r0, 0xd000943e, &(0x7f0000116e40)={0x0, 0x0, "8ec903bf053f652b8aae9940fd40078b1dfec6370c72648f509174a094cc64cc579bba1168768e586be2c2db4e210510b3005dda128ad1453a65679256966090f427d64217ccb47ca2579489a475ff67a264e4764b82c05a49bc99b50cfc33b2c542f0e143f0fe830d94986ddca872db6d5380bcff3ada0a872edfaec7a6729cfab753403f8df28f2b046e80400ff99fe328dfe4f670c53cb067d70b42bcccb5410f58db6b774d849311cff2f18801cb6b82cf1622dc283b9002beb64d9a5fb0fec7b3bb6b0b6ef3cdac7d36c8ac6a3771b8c4c746a118934725c8c2ad4019881acd243d3ede9cfb220a56849d718c38992ee59b069850f657b22e08f778cb46", "03dcaf1f9893440d48b78544e0f663e1f5b1cb5b29f32e7e1ef8ff26d1cb286aac2898d7db4bafa536e56e130fc6c9de579a6997ef306449edd25c1569d2d1c14b9f93ba28142af38a98678f02fc97c8ddd08b80b1b48333d58e486f4cb468c75adcbd2aa34c550e9f29b4d61eaeeba87ad9f4d01fa7108f82ad0d1b8e02cf71a0b325bd82d2d3faece05771556f94bdbfcca8874ea24dcb8dbebb227fe97a3288256b1feecb82148ae1a6f0a7d7838f16531cadec25369e7ec40ccd87b3bfee78c4ebf62f628a9ac0b05db442f1137830a2df8a1f5b2ce045b7c34a4370bcfedbdb459ddff9e9e5f4ccf05753f67e0e5357742c8b3086ab1247e4ee107eaabdd73d36dcd0bf045eac2f758bb9ee6966c1314d742473d3cd07eec711b3f62de19df36383723e62f6478590dd2270488e5891bc86e853031ca7f0c1f45d7de3d12d02c560590785f31b587c629fbb6a6e5407e81fa2d4892078237a7c1ecc0057ce1d36b70b604dc3311596d38c4b7aa0d3cc404c7b35c1f6a26c8e802db968fd10f0684c3b026aecdda4b31dcf8e5faa832a856af7e83965518341e8657d46d7d48b81242aa637d66f03be22c4e4cdf07e0be6c568aa653821861459b5eac765d206529fba81bda37a2636e73ada347beeb29ef57d5d4238626623faaef94453b8bd956eb524c9ac2054b1622e6c50a0f54ca1814d47d1f427b198a598624e5a7acba4f9c09cff789c9a48af5a924ec0dfb466726cd37ebaab478e258a80a125d2cda04042d0f00a95b459ff836bbbf4283e0cf6ca970625a4889e19f7ef9585cc372eb8a9f3a988ba197cfa8e8b637fd8c652bbb70c368eeb9324c3a1d4506e4a042781b3e7e1d1d8fd675b7282aef6089a7860ab3ade6cbc303c679fb5b527c86ca41c972bdaf7db11809d47f798c0987a88577da04b57be3ab91cf0a050aa4917e5ad7f21dcc4feb81243e5fd4624a8600c33d8adda0c65b8720bd62dab28cd3c358912beb90562d55c3207a193423fc7d77147cb1040508742c4e90a4cd42f136f21c751582e19b9d7796a43b604206045e31003ef576bdc77304ba89fd9c6616b8f4dd05b5bd6cea1407d1cf265ad9679911ef63b7e3ca967d0631981c784caaa96d4f66d3e6b09fbd0179df9d1c7b157bf9b6eb036c159fe9d703f6a97d06ddcebf0b37cfbf295dff20f76fee8dd4163b9e92f9fbccf9e088d7938493bbd0fa2237bbb6ae2bd601fddb10b3b129f97784bf487b4e9c0a00e1a529f634bea6c1dea9e0d3026f24d0e5ed7e8430eb984f38d6f9b4782e79d06dd1b2ac9c5c35ddae25e029366b03b9d58d75b0fa4b232378882094254e879353110e5541541de17189326700b22fb8cff5651cd7ff40d89b0bed3b45c72f0568aec709fa333a7131273b1b7eee403a5115349b4cfffb47e670a01b7aab15aa23df7b01b8b8fa1846f8a48da5f5a2aff3c1419991ee4ac27d43ab2097ce1ea27572b4581c56cff363312dc9ab6a47391a2ab97956f7923c028f4162fd0a58f03aaca5bf1b18f5c1f0ce3651bd18b73bb2c09c1e060e372ef2a32c1a8a4ac8a3b78545ed079b4813a2356e9a83c56174e6220c7f929ef23810ef7f97615294b86a
95e53310edf3fd8ba1ce591499d199f92860d948b8af662e882a432d5ae9066054e6c6e0f45e83d9c801923029b649894697ea9a18b161991ee95907ef9d4fe3f2d28c95aa8d9b3ea6f63db1b5150d4a7b3bedb5dafba3ec25272932d3c5e622bb40c7996eb992a21dc99ca906ef28e9d828430371dbcfe90b3be0976289e2096b2a3f4fde7e462fe89e6c6b663c49e873b3dd5c3eb9cd45d1c64543f17cd35d669c041665a1a363992f4b86b523e1d2ef9be87c08c43717dfe231ceb3ee6377aacf76cc483e945877a4d4548897d086dee815f1111d4403010d3bc87ae02a307d5a89aa6c14f013cfa8e1c7e724ecbb89447c37e0f2d13591a9267e5e5bee8b17ca28f7ef49aba56046538e21e251b9d475724f8bf36ff7329cc1c53a29cb610d1f1a465f966e1539cffa0256a8d83757b4c554b51a8a226cc1fd5f3a1a3f211cdb2b966b12d3a944d59774ddf19b1b04a693a69f22025a872234d9bbb606b26471709c6cacda5dccf8398857b4abd3a14f81a331bde2699bc231bb22e74d0a9c84b4af494f89e5d934624b9b1c6f6f7f1c147a62f1e6ce408cc3aae2f98f6ae6c905316a202b568848d70773ba4cc8c28373f3674e4cbf50d47512a5ba8c90a9e9862bb689f56e42fb61101efe3f78bc460c76c7802504f8931f7f7a22cbbff1e090cefb39644ca78ea3317b1ad3eb8216ec67e53e8243cef030afe6999a5f7fd160c2f7e46612536b5480b5876609349111108a2b4c2ae6c3335dd7122d0e5f93808b454df43e3d9459ea7c75ec9bfd1b15188d7e890141444a34cb0a505d8d34cc50acd8bccf80f755b2c0aef0613d9c6dba88a323c318ec36ea19866c210037b7719a8cadb418d046cebb55bf2f134dca393bc2cb5278102584715e1500ba68208601b587ba6b3e026b717c609392772edd60aae8a8df223c51e8eb005241fba1cef80e309dc47e3e78cff043fa96c48ee02d1e0c507da17bbacd69dd08198a1ce2ad09e05a78ed8d7bf1cea454b8fcfcf807766300aa2382c3a9f0f79aa2eb85eae51da00d9364fe85abb40b3f63df3cdf88c88f4612fcd2a4be4e683a87863d520988922b630a76f5c3d87c89eb685d69873efc4604852a086f290f10c8ab6d595f2e73b3dfcb27cbf5b7990be66594a1989950f53b696cc4ca96a669e676b5813d8d9258aa231c3159a7b5d9a1f97cbd537e0e5e78b765a41eaf3a182b56844cf4244894196c7a68dc6b4040a6728743650a83e050daa580f876b6cc4e2d9429cfa9e8052262127c3d59857cc58fccd5909bd7e48c689d3c75b0ad8d6ece1d89e044442d7e66a71a53740614f8d59230f10a58b7bff957fa3ed6a2b87c1760eb8d076bd948b739c36f693d7e479b3d5e909f638c2cbaa8b0534ec930e97d9c4647816c4c350c7f2de0096066b6c755e8eccbebf09a670fe17f2b1ef2087900a6ffb41f078ee9f4465efb073abe0b89a4904637e2ffd81d2cb050691663718403f38cad4f3165a54906aea1bb56d51224e974337c46ff3a91eeb1679aec87249feb0cd60883ee9ad7ef9e823880d2de9abfcfc15148ead2f2caf40735bb02bc7491c99051b9cd382b3b84a86ea93c671e78405f3f575cba517af9799fe436fddc915938b184aac511f6e502b3d75346119858ab29496c96ed769db712b181d069ae5d6b68ebd21b5deb6611db19cc88a63d14076d19b550b0a0bfa5065cb0473ccd9ece619806c8034ffe05c5f63640acbe5869e3e97e4bab218469f7e668213f041849deba3f161a2cc3d26bb8085afce1202a3b6e716e6808f05ba26258865df71d69a07ec8dfdfd1c325b5b697593dc50c049cb8faa4b1b03bf320b4daee36ae70a0a0d08d541efb54b5220360698b8a568d4772c4394f49fe494496dcc007429981d6d14574d655f9e0db6a2956bd686a42e8f55693591657d919cebe24e82351eb246bcfd0a6cb378df29e087c33a39e0f94e6e3d597544aa5710a1695c63cebf43efa8f18a00903f55ff578d89665e25e742ae17f8fb8ad4ef93a8ce4739f3aa1554c559df0853c67590c2e3f33e7bcabd410518639fd157e3d21ae198cda85a810c9841e2d2be0f04b78d3f3bc37518f1f4c3d87b08a06ba43f38a500046965d8c560bfa78f4b6f12f77b33db1caec0cc637577b8bcada413139c5923b41355da8a2059bb1ec0f055477f49977584fd529a9c6b30ea6847b91cd2782e63530d54d53e2ce042aedba3abbe2297ec6020ccd5b793da75dc4959e9fafa6a11ce6a2c253f1e22948aa64f00bfcc5bcd32e8a1c3de126f40164ebf9446ad056618771d1d2f9ace2442d8287154852b53e6b44eb0c88fc80f7345de164221c6fb050dd410a8c2cdc35f605fc462c4b8f4a70684b2e35110751609ee1e72a18c3a34beae262c5b95f2e4ad5cfdc09279433609bea402f5868f49c62226bb892d0fd881e55a1588cc1be9584277998cc81cd29c50f6d83e589269570d47266a09515700303ca115f7e958cba81a73
9abf858f1bf96f8fce97106abc9a2b1482cec0f3644f8a852a566f1eca56b0db27bcf5f48a2675df2c63c6191055214cf31671616b16d2061ca030cac4a71d81dd4313e9f14cecb9f9def8894d5a092dd212d970fd0c6fc71935a6b20204d96eaac97d137220143b259fb19a45cc758924abcda3cdef8e50dc108b7272aa280ada8955266142acc87bb6d2e375e3751f37f0135df0c15685097e7e5ac566151cdbd96c64d98401543db76ca7f3aacd916238e18dc474990931b66f1424c091b3dd7712df634e038f61edfa70754a47df0f15af62e94f7ba10c613a0728f002ad7fc466f0d2cf53976efd10c54068e3e0f5f097dc168cf9e7633ee9d2bf5765d5b0d167763e43de53991181be485e51e8bbd4b330a470bfc86ed7ead7e04c5d0f08c1498bb0a654a2e8c928c7019c719173f465507696bfb7c2fe21d61ec8e36087b68b8a5360daaa8225409b6f03e0c281e2ac78c2b6acac618829689c44d53d09efc09a3d6181e77fd6e24bc516a132925eddd2eefc416df6445fa2f08d4cbf209a30caf6678cfa81c778d32c8f7da0383ddd627b3681d8a8bd65ad8721a298b72450342ac679e9607e5ac7b3af600f87eca8d691fec32076d24113f6f4699aec2f6aca2f8753ab7871cdbed3b8073eb8438f8afc807d9e5e8530d298e69fcd0b62fb63b099180a6a1bd57fe63e3223747bc973c4e2781665325c702505cba798ea979e61c86f4af8d855f3c1ddc90661ea05262464703ae88cf52f1a451fe462fe0abcef4972b44ffe20b1f90608e63c1899cb700bbbdc6e04b3800a7936fd5a90c701a16c6ffd10546878e366ee4e6ad80d8cba8f01efa16c7e0b4389777b820771c87231467b4371f3e603c859580ccae225e9c7e6ab3deff4ada175f1c8ee885367f964db6ed0bd5d70f811d93fd592fc9be1c86da99ab1923c7a49105ae34c27cfae67f07ab7a147b307914f75aa90954e409a8d931f07315dcb8c1e0bd904001af7a63ae9bf286c87df6c7d8e46b9d338b9bdd72fbb04e44069fbc3e1c80d6af1cca849f9543069a5b60e06d7be1fc0ec25071f2189e9759be5fb5b0efde0e0a3f8c637b5cdd2da501bf3972de07a775b12fad2a00168ed79b45291a77e74c9687528f4d128e2fc6a7a62252ef91161f30c6aeb7e16890549a2fcb487aba2086c9ac4a22ad97fcaf4e8ed5a2cedf16cdd8d9a54d23cfa2727d0493b9f1fd14e4cd099e29b01232ac66a9571324f3538163832fab4522e49d43260d762195655"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000117e40)={0x0, ""/256, 0x0, 0x0}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000118040)={0x0}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f0000118240)={0x80, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {0x0, 0x0}], 0x0, "ed6b78f4bf3667"}) (async) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r0, 0x81f8943c, &(0x7f0000119240)={0x0}) ioctl$BTRFS_IOC_GET_SUBVOL_ROOTREF(r0, 0xd000943d, &(0x7f0000119440)={0xff, [{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {r1, r2}, {}, {0x0, r3}, {r4, r5}, {r6, r7}, {r13}, {r14, r19}, {0x0, r22}, {r23, r24}, {0x0, r25}, {r26, r27}, {}, {r28, r29}, {r30, r31}, {r32, r33}, {r34, r35}, {r36, r37}, {r38}], 0x8, "bce5f58932c3a5"}) 00:20:23 executing program 2: r0 = 
syz_open_dev$dri(&(0x7f0000000000), 0x8, 0x50242) r1 = syz_open_dev$dri(&(0x7f00000002c0), 0xffffffff, 0x100) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r1, 0xc01064b5, &(0x7f0000000300)={&(0x7f0000000340)}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000180)={&(0x7f00000004c0)=[0x0, 0x0], &(0x7f0000000140)=[0x0], 0x2, 0x0, 0xaeaeaeae}) r3 = syz_open_dev$dri(&(0x7f0000000340), 0x10000, 0x800) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r3, 0xc04064aa, &(0x7f0000000480)={&(0x7f0000000380)=[0x0, 0x0, 0x0], &(0x7f00000003c0)=[{}, {}, {}, {}], r2, 0x0, '\x00', 0x3, 0x4}) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r0, 0xc04064aa, &(0x7f00000001c0)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[{}, {}], r2, 0x0, '\x00', 0x6, 0x2}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f00000005c0)={&(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000500)=[0x0, 0x0, 0x0], &(0x7f0000000540)=[0x0, 0x0], &(0x7f0000000580)=[0x0, 0x0, 0x0], 0x8, 0x3, 0x2, 0x3}) ioctl$DRM_IOCTL_MODE_GETENCODER(r3, 0xc01464a6, &(0x7f0000000100)) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000240)={&(0x7f0000000200), 0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000280)={r4}) [ 1223.477497][ T1942] bond0 (unregistering): (slave bond_slave_0): Releasing backup interface 00:20:23 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc020660b, &(0x7f0000001440)) 00:20:23 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$vimc2(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000040)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:23 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xc9480440, &(0x7f0000001440)) 00:20:23 executing program 1: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x200, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:23 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) r1 = openat$loop_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x147480, 0x0) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f0000000040)) 00:20:23 executing program 1: r0 = syz_open_dev$dri(&(0x7f0000000000), 0x8, 0x50242) (async) r1 = syz_open_dev$dri(&(0x7f00000002c0), 0xffffffff, 0x100) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r1, 0xc01064b5, &(0x7f0000000300)={&(0x7f0000000340)}) (async) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000180)={&(0x7f00000004c0)=[0x0, 0x0], &(0x7f0000000140)=[0x0], 0x2, 0x0, 0xaeaeaeae}) (async) r3 = syz_open_dev$dri(&(0x7f0000000340), 0x10000, 0x800) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r3, 0xc04064aa, &(0x7f0000000480)={&(0x7f0000000380)=[0x0, 0x0, 0x0], &(0x7f00000003c0)=[{}, {}, {}, {}], r2, 0x0, '\x00', 0x3, 0x4}) ioctl$DRM_IOCTL_MODE_GETPROPERTY(r0, 0xc04064aa, &(0x7f00000001c0)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[{}, {}], r2, 0x0, '\x00', 0x6, 0x2}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f00000005c0)={&(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000500)=[0x0, 0x0, 0x0], &(0x7f0000000540)=[0x0, 0x0], &(0x7f0000000580)=[0x0, 0x0, 0x0], 0x8, 0x3, 0x2, 0x3}) ioctl$DRM_IOCTL_MODE_GETENCODER(r3, 0xc01464a6, 
&(0x7f0000000100)) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000240)={&(0x7f0000000200), 0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_REVOKE_LEASE(r0, 0xc00464c9, &(0x7f0000000280)={r4}) 00:20:23 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, &(0x7f0000000140)={'broute\x00', 0x0, 0x4, 0xfb, [0xa67d, 0x8, 0xcff, 0x100000001, 0x8000000000000001, 0x400], 0x3, &(0x7f0000000000)=[{}, {}, {}], &(0x7f0000000040)=""/251}, &(0x7f00000001c0)=0x78) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000200)={0xc}) 00:20:23 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r1 = openat$vimc2(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000040)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:23 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xca540440, &(0x7f0000001440)) 00:20:23 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) r1 = openat$loop_ctrl(0xffffffffffffff9c, &(0x7f0000000000), 0x147480, 0x0) ioctl$BTRFS_IOC_GET_SUBVOL_INFO(r1, 0x81f8943c, &(0x7f0000000040)) [ 1223.703967][ T6190] Bluetooth: hci1: command 0x041b tx timeout 00:20:23 executing program 4: r0 = syz_open_dev$dri(&(0x7f0000000000), 0xfffffffffffffffb, 0x0) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f00000003c0)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[{}, {}, {}, {}, {}, {}, {}, {}, {}, {}], &(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0], 0xa, 0xa, 0x6}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f00000004c0)={&(0x7f0000000440)=[0x0], &(0x7f0000000480)=[0x0, 0x0, 0x0, 0x0], 0x1, 0x0, 0xcccccccc}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000580)={&(0x7f0000000500)=[0x0, 0x0, 0x0], &(0x7f0000000540)=[0x0, 0x0], 0x3, 0x0, 0xc0c0c0c0}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f0000000600)={&(0x7f00000005c0)=[0x0, 0x0, 0x0, 0x0], 0x4}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000740)={&(0x7f0000000640)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000680)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000006c0)=[0x0], &(0x7f0000000700)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x8, 0x1, 0x7}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(0xffffffffffffffff, 0xc01064b5, &(0x7f00000007c0)={&(0x7f0000000780)=[0x0, 0x0, 0x0], 0x3}) r7 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r7, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r7, 0xc05064a7, &(0x7f00000003c0)={0x0, 0x0, &(0x7f0000000540)=[0x0], 0x0, 0x0, 0x1, 0x0, 0x0, r8}) ioctl$DRM_IOCTL_MODE_GETENCODER(r0, 0xc01464a6, &(0x7f0000000800)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f00000009c0)={&(0x7f0000000840)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000880)=[{}, {}], &(0x7f0000000940)=[0x0, 0x0], &(0x7f0000000980)=[0x0, 0x0, 0x0], 0x2, 0x2, 0x5, 0x0}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000a80)={&(0x7f0000000a40)=[r1, r2, r3, r4, r5, r6, r8, r9, r10], 0x9, 0x800}) 00:20:23 executing program 4: 
getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, &(0x7f0000000140)={'broute\x00', 0x0, 0x4, 0xfb, [0xa67d, 0x8, 0xcff, 0x100000001, 0x8000000000000001, 0x400], 0x3, &(0x7f0000000000)=[{}, {}, {}], &(0x7f0000000040)=""/251}, &(0x7f00000001c0)=0x78) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000200)={0xc}) 00:20:23 executing program 2: r0 = syz_open_dev$dri(&(0x7f0000000000), 0xfffffffffffffffb, 0x0) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f00000003c0)={&(0x7f0000000040)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000080)=[{}, {}, {}, {}, {}, {}, {}, {}, {}, {}], &(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0], 0xa, 0xa, 0x6}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f00000004c0)={&(0x7f0000000440)=[0x0], &(0x7f0000000480)=[0x0, 0x0, 0x0, 0x0], 0x1, 0x0, 0xcccccccc}) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000580)={&(0x7f0000000500)=[0x0, 0x0, 0x0], &(0x7f0000000540)=[0x0, 0x0], 0x3, 0x0, 0xc0c0c0c0}) (async) ioctl$DRM_IOCTL_MODE_OBJ_GETPROPERTIES(r0, 0xc02064b9, &(0x7f0000000580)={&(0x7f0000000500)=[0x0, 0x0, 0x0], &(0x7f0000000540)=[0x0, 0x0], 0x3, 0x0, 0xc0c0c0c0}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(r0, 0xc01064b5, &(0x7f0000000600)={&(0x7f00000005c0)=[0x0, 0x0, 0x0, 0x0], 0x4}) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000740)={&(0x7f0000000640)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000680)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f00000006c0)=[0x0], &(0x7f0000000700)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x8, 0x1, 0x7}) ioctl$DRM_IOCTL_MODE_GETPLANERESOURCES(0xffffffffffffffff, 0xc01064b5, &(0x7f00000007c0)={&(0x7f0000000780)=[0x0, 0x0, 0x0], 0x3}) r7 = syz_open_dev$dri(&(0x7f0000000000), 0x1, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r7, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r7, 0xc04064a0, &(0x7f0000000140)={0x0, 0x0, &(0x7f00000000c0)=[0x0], 0x0, 0x0, 0x0, 0x1}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r7, 0xc05064a7, &(0x7f00000003c0)={0x0, 0x0, &(0x7f0000000540)=[0x0], 0x0, 0x0, 0x1, 0x0, 0x0, r8}) ioctl$DRM_IOCTL_MODE_GETENCODER(r0, 0xc01464a6, &(0x7f0000000800)={0x0, 0x0, 0x0}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f00000009c0)={&(0x7f0000000840)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000880)=[{}, {}], &(0x7f0000000940)=[0x0, 0x0], &(0x7f0000000980)=[0x0, 0x0, 0x0], 0x2, 0x2, 0x5}) (async) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f00000009c0)={&(0x7f0000000840)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000880)=[{}, {}], &(0x7f0000000940)=[0x0, 0x0], &(0x7f0000000980)=[0x0, 0x0, 0x0], 0x2, 0x2, 0x5, 0x0}) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000a80)={&(0x7f0000000a40)=[r1, r2, r3, r4, r5, r6, r8, r9, r10], 0x9, 0x800}) (async) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(r0, 0xc01864c6, &(0x7f0000000a80)={&(0x7f0000000a40)=[r1, r2, r3, r4, r5, r6, r8, r9, r10], 0x9, 0x800}) [ 1223.805214][ T1942] bond0 (unregistering): Released all slaves 00:20:23 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) 00:20:23 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xdd480440, &(0x7f0000001440)) 00:20:23 executing program 1: r0 = 
openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x690c00, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:23 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) r2 = socket$inet_sctp(0x2, 0x1, 0x84) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000380)) r3 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000100), 0x181100, 0x0) r4 = accept$inet(r3, &(0x7f0000000180)={0x2, 0x0, @empty}, &(0x7f00000001c0)=0x10) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000140)) ioctl$BTRFS_IOC_START_SYNC(r4, 0x80089418, &(0x7f0000000200)) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000240)) r5 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r5, 0x4020940d, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r3, 0xc0502100, &(0x7f00000003c0)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r5, 0x40182103, &(0x7f0000000440)={r6, 0x3, r1, 0x6, 0x80000}) bind$inet(r2, &(0x7f0000000000)={0x2, 0x4e23, @dev={0xac, 0x14, 0x14, 0xf}}, 0x10) r7 = openat$zero(0xffffffffffffff9c, &(0x7f0000001280), 0x0, 0x0) connect$phonet_pipe(r7, 0x0, 0xfffffffffffffd7c) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r7, 0x40182103, &(0x7f0000000280)={0x0, 0x2, r0, 0x23}) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r7, 0xc0502100, &(0x7f00000002c0)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r3, 0x40182103, &(0x7f0000000340)={r8, 0x3, r2, 0x5, 0x80000}) setsockopt$inet_mtu(r0, 0x0, 0xa, &(0x7f00000000c0), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) 00:20:23 executing program 1: ioctl$MEDIA_IOC_ENUM_LINKS(0xffffffffffffffff, 0xc0287c02, &(0x7f00000000c0)={0x80000000, &(0x7f0000000000)=[{0x80000000}, {0x80000000}], &(0x7f0000000040)=[{{0x80000000}, {0x80000000, 0x0}}]}) r4 = syz_open_dev$media(&(0x7f0000000080), 0x0, 0x0) ioctl$MEDIA_IOC_ENUM_LINKS(r4, 0xc0287c02, &(0x7f0000000180)={0x80000000, 0x0, &(0x7f0000000100)=[{}, {{0x80000000}}]}) ioctl$MEDIA_IOC_G_TOPOLOGY(r4, 0xc0487c04, &(0x7f00000012c0)={0x0, 0x7, 0x0, &(0x7f0000001a80)=[{}, {}, {}, {0x80000000}, {}, {}, {}], 0x3, 0x0, &(0x7f0000001e80)=[{}, {}, {}], 0x8, 0x0, &(0x7f0000000cc0)=[{}, {}, {}, {0x0, 0x80000000, 0x0, {0x0}}, {}, {0x0, 0x80000000}, {}, {}], 0x10, 0x0, &(0x7f0000002000)=[{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}]}) ioctl$MEDIA_IOC_G_TOPOLOGY(r4, 0xc0487c04, &(0x7f00000016c0)={0x0, 0x2, 0x0, &(0x7f0000001340)=[{}, {}], 0x2, 0x0, &(0x7f0000001400)=[{}, {}], 0x3, 0x0, &(0x7f0000001500)=[{}, {}, {}], 0x7, 0x0, &(0x7f0000001580)=[{}, {}, {}, {}, {}, {}, {}]}) r9 = syz_open_dev$media(&(0x7f0000000080), 0x0, 0x0) ioctl$MEDIA_IOC_ENUM_LINKS(r9, 0xc0287c02, &(0x7f0000000300)={0x80000000, 0x0, &(0x7f0000000040)=[{{}, {0x80000000}}]}) ioctl$MEDIA_IOC_SETUP_LINK(r9, 0xc0347c03, &(0x7f0000000880)={{r10}}) ioctl$MEDIA_IOC_ENUM_LINKS(r9, 0xc0287c02, &(0x7f0000000f80)={r0, &(0x7f0000000ec0), &(0x7f0000000f00)}) r11 = syz_open_dev$media(&(0x7f0000000040), 0x0, 0x0) ioctl$MEDIA_IOC_G_TOPOLOGY(r9, 0xc0487c04, &(0x7f0000000c80)={0x0, 0x4, 0x0, &(0x7f0000001040)=[{}, {}, {}, {}, {}], 0x21, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0}) ioctl$MEDIA_IOC_ENUM_LINKS(r11, 0xc0287c02, &(0x7f0000000e80)={r6, &(0x7f0000000dc0), &(0x7f0000000e00)}) ioctl$MEDIA_IOC_ENUM_LINKS(r4, 0xc0287c02, &(0x7f0000001940)={r8, &(0x7f0000001880), &(0x7f00000018c0)}) ioctl$MEDIA_IOC_ENUM_LINKS(r4, 0xc0287c02, &(0x7f0000001800)={r5, &(0x7f0000001740), 
&(0x7f0000001780)=[{}, {{}, {0x80000000}}]}) ioctl$MEDIA_IOC_SETUP_LINK(r4, 0xc0347c03, &(0x7f0000001840)={{r10, r3, 0x4, [0x3ff, 0x6a87f7b]}, {r12, r7, 0x2, [0x10000, 0x7]}, 0x0, [0x20, 0x1ff]}) ioctl$MEDIA_IOC_ENUM_LINKS(r4, 0xc0287c02, &(0x7f00000003c0)={r5, 0x0, 0x0}) ioctl$MEDIA_IOC_ENUM_LINKS(r4, 0xc0287c02, &(0x7f0000000500)={0x80000000, &(0x7f0000000440), &(0x7f0000000480)}) ioctl$MEDIA_IOC_ENUM_LINKS(0xffffffffffffffff, 0xc0287c02, &(0x7f00000001c0)={r1, &(0x7f0000000100), &(0x7f0000000140)}) r13 = syz_open_dev$media(&(0x7f0000000200), 0x2b3780, 0x80) r14 = syz_open_dev$media(&(0x7f0000000080), 0x0, 0x0) ioctl$MEDIA_IOC_G_TOPOLOGY(r4, 0xc0487c04, &(0x7f00000007c0)={0x0, 0x4, 0x0, &(0x7f0000000540)=[{}, {}, {}, {}], 0x7, 0x0, &(0x7f00000008c0)=[{}, {}, {}, {}, {}, {}, {}], 0x4, 0x0, &(0x7f00000006c0)=[{}, {}, {}, {0x0, 0x80000000}], 0x2, 0x0, &(0x7f0000000740)=[{}, {}]}) ioctl$MEDIA_IOC_ENUM_LINKS(r4, 0xc0287c02, &(0x7f0000000c80)={r15, &(0x7f0000000840), &(0x7f0000000c00)}) ioctl$MEDIA_IOC_ENUM_LINKS(r14, 0xc0287c02, &(0x7f0000000300)={0x80000000, 0x0, &(0x7f0000000040)=[{{0x80000000}, {0x80000000}}]}) ioctl$MEDIA_IOC_ENUM_LINKS(r13, 0xc0287c02, &(0x7f0000000400)={r16, &(0x7f0000000340), &(0x7f0000000380)}) ioctl$MEDIA_IOC_SETUP_LINK(r14, 0xc0347c03, &(0x7f0000000880)={{r2}}) ioctl$MEDIA_IOC_ENUM_LINKS(r13, 0xc0287c02, &(0x7f0000000300)={r17, &(0x7f0000000240), &(0x7f0000000280)}) 00:20:23 executing program 4: r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x690c00, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) openat$iommufd(0xffffffffffffff9c, &(0x7f0000000040), 0x690c00, 0x0) (async) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) (async) 00:20:23 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) [ 1223.958188][ T3198] 8021q: adding VLAN 0 to HW filter on device bond0 00:20:23 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000001440)) [ 1223.982612][ T3198] 8021q: adding VLAN 0 to HW filter on device team0 [ 1224.003578][ T3797] BUG: MAX_LOCKDEP_KEYS too low! [ 1224.004617][ T3642] bridge0: port 1(bridge_slave_0) entered blocking state [ 1224.004999][ T3797] turning off the locking correctness validator. 
[ 1224.006943][ T3642] bridge0: port 1(bridge_slave_0) entered forwarding state [ 1224.008551][ T3797] CPU: 1 PID: 3797 Comm: syz-executor.3 Not tainted 6.8.0-rc7-syzkaller-g707081b61156 #0 [ 1224.013241][ T3797] Hardware name: Google Google Compute Engine/Google Compute Engine, BIOS Google 01/25/2024 [ 1224.016043][ T3797] Call trace: [ 1224.016918][ T3797] dump_backtrace+0x1b8/0x1e4 [ 1224.018193][ T3797] show_stack+0x2c/0x3c [ 1224.019277][ T3797] dump_stack_lvl+0xd0/0x124 [ 1224.020511][ T3797] dump_stack+0x1c/0x28 [ 1224.021626][ T3797] register_lock_class+0x568/0x6ac [ 1224.023009][ T3797] __lock_acquire+0x184/0x763c [ 1224.024249][ T3797] lock_acquire+0x23c/0x71c [ 1224.025467][ T3797] __flush_workqueue+0x14c/0x11c4 [ 1224.026798][ T3797] nci_close_device+0x140/0x5b8 [ 1224.028151][ T3797] nci_unregister_device+0x58/0x21c [ 1224.029536][ T3797] virtual_ncidev_close+0x5c/0xa0 [ 1224.030920][ T3797] __fput+0x30c/0x738 [ 1224.032073][ T3797] __fput_sync+0x60/0x9c [ 1224.033197][ T3797] __arm64_sys_close+0x150/0x1e0 [ 1224.034624][ T3797] invoke_syscall+0x98/0x2b8 [ 1224.035881][ T3797] el0_svc_common+0x130/0x23c [ 1224.037137][ T3797] do_el0_svc+0x48/0x58 [ 1224.038246][ T3797] el0_svc+0x54/0x168 [ 1224.039329][ T3797] el0t_64_sync_handler+0x84/0xfc [ 1224.040727][ T3797] el0t_64_sync+0x190/0x194 [ 1224.042086][ C1] vkms_vblank_simulate: vblank timer overrun 00:20:24 executing program 4: ioctl$MEDIA_IOC_ENUM_LINKS(0xffffffffffffffff, 0xc0287c02, &(0x7f00000000c0)={0x80000000, &(0x7f0000000000)=[{0x80000000}, {0x80000000}], &(0x7f0000000040)=[{{0x80000000}, {0x80000000, 0x0}}]}) (async) r4 = syz_open_dev$media(&(0x7f0000000080), 0x0, 0x0) ioctl$MEDIA_IOC_ENUM_LINKS(r4, 0xc0287c02, &(0x7f0000000180)={0x80000000, 0x0, &(0x7f0000000100)=[{}, {{0x80000000}}]}) (async) ioctl$MEDIA_IOC_G_TOPOLOGY(r4, 0xc0487c04, &(0x7f00000012c0)={0x0, 0x7, 0x0, &(0x7f0000001a80)=[{}, {}, {}, {0x80000000}, {}, {}, {}], 0x3, 0x0, &(0x7f0000001e80)=[{}, {}, {}], 0x8, 0x0, &(0x7f0000000cc0)=[{}, {}, {}, {0x0, 0x80000000, 0x0, {0x0}}, {}, {0x0, 0x80000000}, {}, {}], 0x10, 0x0, &(0x7f0000002000)=[{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}]}) (async) ioctl$MEDIA_IOC_G_TOPOLOGY(r4, 0xc0487c04, &(0x7f00000016c0)={0x0, 0x2, 0x0, &(0x7f0000001340)=[{}, {}], 0x2, 0x0, &(0x7f0000001400)=[{}, {}], 0x3, 0x0, &(0x7f0000001500)=[{}, {}, {}], 0x7, 0x0, &(0x7f0000001580)=[{}, {}, {}, {}, {}, {}, {}]}) (async) r9 = syz_open_dev$media(&(0x7f0000000080), 0x0, 0x0) ioctl$MEDIA_IOC_ENUM_LINKS(r9, 0xc0287c02, &(0x7f0000000300)={0x80000000, 0x0, &(0x7f0000000040)=[{{}, {0x80000000}}]}) ioctl$MEDIA_IOC_SETUP_LINK(r9, 0xc0347c03, &(0x7f0000000880)={{r10}}) ioctl$MEDIA_IOC_ENUM_LINKS(r9, 0xc0287c02, &(0x7f0000000f80)={r0, &(0x7f0000000ec0), &(0x7f0000000f00)}) r11 = syz_open_dev$media(&(0x7f0000000040), 0x0, 0x0) (async) ioctl$MEDIA_IOC_G_TOPOLOGY(r9, 0xc0487c04, &(0x7f0000000c80)={0x0, 0x4, 0x0, &(0x7f0000001040)=[{}, {}, {}, {}, {}], 0x21, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0}) ioctl$MEDIA_IOC_ENUM_LINKS(r11, 0xc0287c02, &(0x7f0000000e80)={r6, &(0x7f0000000dc0), &(0x7f0000000e00)}) (async) ioctl$MEDIA_IOC_ENUM_LINKS(r4, 0xc0287c02, &(0x7f0000001940)={r8, &(0x7f0000001880), &(0x7f00000018c0)}) ioctl$MEDIA_IOC_ENUM_LINKS(r4, 0xc0287c02, &(0x7f0000001800)={r5, &(0x7f0000001740), &(0x7f0000001780)=[{}, {{}, {0x80000000}}]}) ioctl$MEDIA_IOC_SETUP_LINK(r4, 0xc0347c03, &(0x7f0000001840)={{r10, r3, 0x4, [0x3ff, 0x6a87f7b]}, {r12, r7, 0x2, [0x10000, 0x7]}, 0x0, [0x20, 0x1ff]}) ioctl$MEDIA_IOC_ENUM_LINKS(r4, 0xc0287c02, 
&(0x7f00000003c0)={r5, 0x0, 0x0}) ioctl$MEDIA_IOC_ENUM_LINKS(r4, 0xc0287c02, &(0x7f0000000500)={0x80000000, &(0x7f0000000440), &(0x7f0000000480)}) (async) ioctl$MEDIA_IOC_ENUM_LINKS(0xffffffffffffffff, 0xc0287c02, &(0x7f00000001c0)={r1, &(0x7f0000000100), &(0x7f0000000140)}) r13 = syz_open_dev$media(&(0x7f0000000200), 0x2b3780, 0x80) (async) r14 = syz_open_dev$media(&(0x7f0000000080), 0x0, 0x0) ioctl$MEDIA_IOC_G_TOPOLOGY(r4, 0xc0487c04, &(0x7f00000007c0)={0x0, 0x4, 0x0, &(0x7f0000000540)=[{}, {}, {}, {}], 0x7, 0x0, &(0x7f00000008c0)=[{}, {}, {}, {}, {}, {}, {}], 0x4, 0x0, &(0x7f00000006c0)=[{}, {}, {}, {0x0, 0x80000000}], 0x2, 0x0, &(0x7f0000000740)=[{}, {}]}) ioctl$MEDIA_IOC_ENUM_LINKS(r4, 0xc0287c02, &(0x7f0000000c80)={r15, &(0x7f0000000840), &(0x7f0000000c00)}) (async) ioctl$MEDIA_IOC_ENUM_LINKS(r14, 0xc0287c02, &(0x7f0000000300)={0x80000000, 0x0, &(0x7f0000000040)=[{{0x80000000}, {0x80000000}}]}) ioctl$MEDIA_IOC_ENUM_LINKS(r13, 0xc0287c02, &(0x7f0000000400)={r16, &(0x7f0000000340), &(0x7f0000000380)}) ioctl$MEDIA_IOC_SETUP_LINK(r14, 0xc0347c03, &(0x7f0000000880)={{r2}}) ioctl$MEDIA_IOC_ENUM_LINKS(r13, 0xc0287c02, &(0x7f0000000300)={r17, &(0x7f0000000240), &(0x7f0000000280)}) 00:20:24 executing program 1: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x400000, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x0, 0x0, 0xffffffffffffffff}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f00000001c0)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x2, 0x365ed2, 0xb2, &(0x7f0000000100)="2e3c551d660c96f7f62a33a72cfa87a6c9738227e97fdaab47f63b12b54597d1207518797341b59099d91840d9adb9beb11f3392c3c022038ae06138db5b26c89ba3dd708d6ec7f5e7a9bf75be26a90133ce55a0e97d71031e71a844450bc4043d3fe6d6ac53f52d6a03e0eaccdf841bade28226db6c177c1b43d5236badec3c189e8103e8cfa845d56e4748a141b843e34c202d16bc2aed27196b682b62ad82c715004a07fa94060c6ada1a2a780891e58f", 0x4}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000040)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:24 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0x9000000, 0x0) 00:20:24 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (async) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) (async) r1 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) r2 = socket$inet_sctp(0x2, 0x1, 0x84) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000000380)) r3 = openat$ubi_ctrl(0xffffffffffffff9c, &(0x7f0000000100), 0x181100, 0x0) r4 = accept$inet(r3, &(0x7f0000000180)={0x2, 0x0, @empty}, &(0x7f00000001c0)=0x10) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000140)) ioctl$BTRFS_IOC_START_SYNC(r4, 0x80089418, &(0x7f0000000200)) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000240)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000240)) openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) (async) r5 = openat$dlm_monitor(0xffffffffffffff9c, &(0x7f0000000ac0), 0x0, 0x0) ioctl$IOMMU_VFIO_IOAS$GET(r5, 0x4020940d, 0x0) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r3, 0xc0502100, &(0x7f00000003c0)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r5, 0x40182103, &(0x7f0000000440)={r6, 0x3, r1, 0x6, 0x80000}) (async) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r5, 
0x40182103, &(0x7f0000000440)={r6, 0x3, r1, 0x6, 0x80000}) bind$inet(r2, &(0x7f0000000000)={0x2, 0x4e23, @dev={0xac, 0x14, 0x14, 0xf}}, 0x10) r7 = openat$zero(0xffffffffffffff9c, &(0x7f0000001280), 0x0, 0x0) connect$phonet_pipe(r7, 0x0, 0xfffffffffffffd7c) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r7, 0x40182103, &(0x7f0000000280)={0x0, 0x2, r0, 0x23}) ioctl$SECCOMP_IOCTL_NOTIF_RECV(r7, 0xc0502100, &(0x7f00000002c0)={0x0}) ioctl$SECCOMP_IOCTL_NOTIF_ADDFD(r3, 0x40182103, &(0x7f0000000340)={r8, 0x3, r2, 0x5, 0x80000}) setsockopt$inet_mtu(r0, 0x0, 0xa, &(0x7f00000000c0), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r1, 0x0, &(0x7f0000001440)) [ 1224.057785][ T3198] hsr0: Slave A (hsr_slave_0) is not up; please bring it up to get a fully working HSR network [ 1224.060533][ T3198] hsr0: Slave B (hsr_slave_1) is not up; please bring it up to get a fully working HSR network [ 1224.064728][ T1691] bridge0: port 2(bridge_slave_1) entered blocking state [ 1224.066599][ T1691] bridge0: port 2(bridge_slave_1) entered forwarding state 00:20:24 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0x9000000, 0x0) 00:20:24 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:24 executing program 3: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x400000, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x0, 0x0, 0xffffffffffffffff}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f00000001c0)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x2, 0x365ed2, 0xb2, &(0x7f0000000100)="2e3c551d660c96f7f62a33a72cfa87a6c9738227e97fdaab47f63b12b54597d1207518797341b59099d91840d9adb9beb11f3392c3c022038ae06138db5b26c89ba3dd708d6ec7f5e7a9bf75be26a90133ce55a0e97d71031e71a844450bc4043d3fe6d6ac53f52d6a03e0eaccdf841bade28226db6c177c1b43d5236badec3c189e8103e8cfa845d56e4748a141b843e34c202d16bc2aed27196b682b62ad82c715004a07fa94060c6ada1a2a780891e58f", 0x4}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000040)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:24 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0x9000000, 0x0) [ 1224.163370][ T3198] 8021q: adding VLAN 0 to HW filter on device batadv0 00:20:24 executing program 2: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x400000, 0x0) 
ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x0, 0x0, 0xffffffffffffffff}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f00000001c0)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x2, 0x365ed2, 0xb2, &(0x7f0000000100)="2e3c551d660c96f7f62a33a72cfa87a6c9738227e97fdaab47f63b12b54597d1207518797341b59099d91840d9adb9beb11f3392c3c022038ae06138db5b26c89ba3dd708d6ec7f5e7a9bf75be26a90133ce55a0e97d71031e71a844450bc4043d3fe6d6ac53f52d6a03e0eaccdf841bade28226db6c177c1b43d5236badec3c189e8103e8cfa845d56e4748a141b843e34c202d16bc2aed27196b682b62ad82c715004a07fa94060c6ada1a2a780891e58f", 0x4}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000040)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:24 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0x9000000, 0x0) 00:20:24 executing program 3: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 1: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x400000, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x0, 0x0, 0xffffffffffffffff}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f00000001c0)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x2, 0x365ed2, 0xb2, &(0x7f0000000100)="2e3c551d660c96f7f62a33a72cfa87a6c9738227e97fdaab47f63b12b54597d1207518797341b59099d91840d9adb9beb11f3392c3c022038ae06138db5b26c89ba3dd708d6ec7f5e7a9bf75be26a90133ce55a0e97d71031e71a844450bc4043d3fe6d6ac53f52d6a03e0eaccdf841bade28226db6c177c1b43d5236badec3c189e8103e8cfa845d56e4748a141b843e34c202d16bc2aed27196b682b62ad82c715004a07fa94060c6ada1a2a780891e58f", 0x4}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000040)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) [ 1224.212136][ T3198] veth0_vlan: entered promiscuous mode 00:20:24 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 
0x80089418, &(0x7f0000000000)) 00:20:24 executing program 4: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x400000, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x0, 0x0, 0xffffffffffffffff}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f00000001c0)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x2, 0x365ed2, 0xb2, &(0x7f0000000100)="2e3c551d660c96f7f62a33a72cfa87a6c9738227e97fdaab47f63b12b54597d1207518797341b59099d91840d9adb9beb11f3392c3c022038ae06138db5b26c89ba3dd708d6ec7f5e7a9bf75be26a90133ce55a0e97d71031e71a844450bc4043d3fe6d6ac53f52d6a03e0eaccdf841bade28226db6c177c1b43d5236badec3c189e8103e8cfa845d56e4748a141b843e34c202d16bc2aed27196b682b62ad82c715004a07fa94060c6ada1a2a780891e58f", 0x4}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000040)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:24 executing program 1: syz_open_dev$dri(0x0, 0x9000000, 0x0) [ 1224.237267][ T3198] veth1_vlan: entered promiscuous mode 00:20:24 executing program 3: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x400000, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x0, 0x0, 0xffffffffffffffff}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f00000001c0)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x2, 0x365ed2, 0xb2, &(0x7f0000000100)="2e3c551d660c96f7f62a33a72cfa87a6c9738227e97fdaab47f63b12b54597d1207518797341b59099d91840d9adb9beb11f3392c3c022038ae06138db5b26c89ba3dd708d6ec7f5e7a9bf75be26a90133ce55a0e97d71031e71a844450bc4043d3fe6d6ac53f52d6a03e0eaccdf841bade28226db6c177c1b43d5236badec3c189e8103e8cfa845d56e4748a141b843e34c202d16bc2aed27196b682b62ad82c715004a07fa94060c6ada1a2a780891e58f", 0x4}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000040)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:24 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 4: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 
&(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) [ 1224.263563][ T3198] veth0_macvtap: entered promiscuous mode [ 1224.267578][ T3198] veth1_macvtap: entered promiscuous mode 00:20:24 executing program 1: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x400000, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x0, 0x0, 0xffffffffffffffff}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f00000001c0)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x2, 0x365ed2, 0xb2, &(0x7f0000000100)="2e3c551d660c96f7f62a33a72cfa87a6c9738227e97fdaab47f63b12b54597d1207518797341b59099d91840d9adb9beb11f3392c3c022038ae06138db5b26c89ba3dd708d6ec7f5e7a9bf75be26a90133ce55a0e97d71031e71a844450bc4043d3fe6d6ac53f52d6a03e0eaccdf841bade28226db6c177c1b43d5236badec3c189e8103e8cfa845d56e4748a141b843e34c202d16bc2aed27196b682b62ad82c715004a07fa94060c6ada1a2a780891e58f", 0x4}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000040)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:24 executing program 3: syz_open_dev$dri(0x0, 0x9000000, 0x0) [ 1224.293712][ T3198] batman_adv: The newly added mac address (aa:aa:aa:aa:aa:3e) already exists on: batadv_slave_0 00:20:24 executing program 4: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) [ 1224.296625][ T3198] batman_adv: It is strongly recommended to keep mac addresses unique to avoid problems! 00:20:24 executing program 1: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) [ 1224.299270][ T3198] batman_adv: The newly added mac address (aa:aa:aa:aa:aa:3e) already exists on: batadv_slave_0 [ 1224.302017][ T3198] batman_adv: It is strongly recommended to keep mac addresses unique to avoid problems! 00:20:24 executing program 2: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x400000, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8}) [ 1224.310209][ T3198] batman_adv: The newly added mac address (aa:aa:aa:aa:aa:3e) already exists on: batadv_slave_0 [ 1224.316419][ T3198] batman_adv: It is strongly recommended to keep mac addresses unique to avoid problems! 
[ 1224.319057][ T3198] batman_adv: The newly added mac address (aa:aa:aa:aa:aa:3e) already exists on: batadv_slave_0 00:20:24 executing program 3: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) [ 1224.321708][ T3198] batman_adv: It is strongly recommended to keep mac addresses unique to avoid problems! 00:20:24 executing program 2: syz_open_dev$dri(0x0, 0x9000000, 0x0) 00:20:24 executing program 4: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8}) [ 1224.334618][ T3198] batman_adv: batadv0: Interface activated: batadv_slave_0 [ 1224.342796][ T3198] batman_adv: The newly added mac address (aa:aa:aa:aa:aa:3f) already exists on: batadv_slave_1 00:20:24 executing program 1: openat$iommufd(0xffffffffffffff9c, 0x0, 0x0, 0x0) [ 1224.351378][ T3198] batman_adv: It is strongly recommended to keep mac addresses unique to avoid problems! [ 1224.355467][ T3198] batman_adv: The newly added mac address (aa:aa:aa:aa:aa:3f) already exists on: batadv_slave_1 00:20:24 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) [ 1224.365352][ T3198] batman_adv: It is strongly recommended to keep mac addresses unique to avoid problems! 00:20:24 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) [ 1224.367962][ T3198] batman_adv: The newly added mac address (aa:aa:aa:aa:aa:3f) already exists on: batadv_slave_1 [ 1224.370719][ T3198] batman_adv: It is strongly recommended to keep mac addresses unique to avoid problems! 
00:20:24 executing program 3: ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8}) [ 1224.373327][ T3198] batman_adv: The newly added mac address (aa:aa:aa:aa:aa:3f) already exists on: batadv_slave_1 00:20:24 executing program 1: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) 00:20:24 executing program 2: openat$iommufd(0xffffffffffffff9c, 0x0, 0x0, 0x0) 00:20:24 executing program 4: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) [ 1224.384823][ T3198] batman_adv: It is strongly recommended to keep mac addresses unique to avoid problems! [ 1224.400192][ T3198] batman_adv: batadv0: Interface activated: batadv_slave_1 00:20:24 executing program 3: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) [ 1224.417086][ T3198] netdevsim netdevsim0 netdevsim0: set [1, 0] type 2 family 0 port 6081 - 0 [ 1224.419670][ T3198] netdevsim netdevsim0 netdevsim1: set [1, 0] type 2 family 0 port 6081 - 0 00:20:24 executing program 1: ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, 0x0) [ 1224.422045][ T3198] netdevsim netdevsim0 netdevsim2: set [1, 0] type 2 family 0 port 6081 - 0 00:20:24 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) [ 1224.433338][ T3198] netdevsim netdevsim0 netdevsim3: set [1, 0] type 2 family 0 port 6081 - 0 00:20:24 executing program 4: openat$iommufd(0xffffffffffffff9c, 0x0, 0x0, 0x0) 00:20:24 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, 
&(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) 00:20:24 executing program 1: accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) 00:20:24 executing program 3: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 1: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) 00:20:24 executing program 4: openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) 00:20:24 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:24 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) 
ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) [ 1224.539897][ T3198] ieee80211 phy169: Selected rate control algorithm 'minstrel_ht' 00:20:24 executing program 4: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 3: openat$drirender128(0xffffffffffffff9c, 0x0, 0x0, 0x0) 00:20:24 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:24 executing program 3: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) [ 1224.592522][ T41] wlan0: Created IBSS using preconfigured BSSID 50:50:50:50:50:50 [ 1224.593196][ T3198] ieee80211 phy170: Selected rate control algorithm 'minstrel_ht' [ 1224.595044][ T41] wlan0: Creating new IBSS network, BSSID 50:50:50:50:50:50 00:20:24 executing program 4: openat$drirender128(0xffffffffffffff9c, 0x0, 0x0, 0x0) 00:20:24 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) socket$inet_dccp(0x2, 0x6, 0x0) 00:20:24 executing program 3: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, 
&(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 1: accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) [ 1224.635510][ T5970] wlan1: Created IBSS using preconfigured BSSID 50:50:50:50:50:50 [ 1224.637658][ T5970] wlan1: Creating new IBSS network, BSSID 50:50:50:50:50:50 00:20:24 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:24 executing program 2: openat$drirender128(0xffffffffffffff9c, 0x0, 0x0, 0x0) 00:20:24 executing program 1: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) socket$inet_dccp(0x2, 0x6, 0x0) 00:20:24 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:24 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:24 executing program 2: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) [ 1224.720825][ T3979] xt_check_table_hooks: 32 callbacks suppressed [ 1224.720839][ T3979] x_tables: duplicate underflow at hook 1 00:20:24 executing program 1: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 
0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) socket$inet_dccp(0x2, 0x6, 0x0) 00:20:24 executing program 4: r0 = openat$drirender128(0xffffffffffffff9c, 0x0, 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) 00:20:24 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) 00:20:24 executing program 3: openat$iommufd(0xffffffffffffff9c, &(0x7f0000000380), 0x0, 0x0) r0 = openat$iommufd(0xffffffffffffff9c, &(0x7f0000000000), 0x400000, 0x0) ioctl$DRM_IOCTL_MODE_CREATE_LEASE(0xffffffffffffffff, 0xc01864c6, &(0x7f00000000c0)={&(0x7f0000000080)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x8, 0x0, 0x0, 0xffffffffffffffff}) ioctl$IOMMU_TEST_OP_ACCESS_RW$syz(r1, 0x3ba0, &(0x7f00000001c0)={0x48, 0x8, 0xffffffffffffffff, 0x0, 0x2, 0x365ed2, 0xb2, &(0x7f0000000100)="2e3c551d660c96f7f62a33a72cfa87a6c9738227e97fdaab47f63b12b54597d1207518797341b59099d91840d9adb9beb11f3392c3c022038ae06138db5b26c89ba3dd708d6ec7f5e7a9bf75be26a90133ce55a0e97d71031e71a844450bc4043d3fe6d6ac53f52d6a03e0eaccdf841bade28226db6c177c1b43d5236badec3c189e8103e8cfa845d56e4748a141b843e34c202d16bc2aed27196b682b62ad82c715004a07fa94060c6ada1a2a780891e58f", 0x4}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, &(0x7f0000000040)={0xc}) ioctl$IOMMU_VFIO_IOAS$GET(r0, 0x3b88, 0x0) 00:20:24 executing program 1: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, 
&(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) ioctl$BTRFS_IOC_START_SYNC(0xffffffffffffffff, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 4: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) socket$inet_dccp(0x2, 0x6, 0x0) 00:20:24 executing program 3: r0 = openat$drirender128(0xffffffffffffff9c, 0x0, 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) 00:20:24 executing program 0: syz_open_dev$dri(0x0, 0x9000000, 0x0) 00:20:24 executing program 2: openat$nci(0xffffffffffffff9c, 0x0, 0x2, 0x0) 00:20:24 executing program 4: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) ioctl$BTRFS_IOC_START_SYNC(0xffffffffffffffff, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 1: accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) socket$inet_dccp(0x2, 0x6, 0x0) 00:20:24 executing program 0: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 3: syz_open_dev$dri(0x0, 0x9000000, 0x0) 00:20:24 executing program 2: r0 = openat$drirender128(0xffffffffffffff9c, 0x0, 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) 00:20:24 executing program 4: openat$nci(0xffffffffffffff9c, 0x0, 0x2, 0x0) 00:20:24 executing program 1: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) 
setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) ioctl$BTRFS_IOC_START_SYNC(0xffffffffffffffff, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 3: socket$inet_dccp(0x2, 0x6, 0x0) 00:20:24 executing program 2: syz_open_dev$dri(0x0, 0x9000000, 0x0) 00:20:24 executing program 0: openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) 00:20:24 executing program 4: openat$iommufd(0xffffffffffffff9c, 0x0, 0x0, 0x0) 00:20:24 executing program 1: openat$nci(0xffffffffffffff9c, 0x0, 0x2, 0x0) 00:20:24 executing program 4: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) r1 = socket$inet_dccp(0x2, 0x6, 0x0) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000000)) 00:20:24 executing program 0: openat$iommufd(0xffffffffffffff9c, 0x0, 0x0, 0x0) 00:20:24 executing program 1: openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) 00:20:24 executing program 2: syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) 00:20:24 executing program 3: syz_open_dev$dri(0x0, 0x0, 0x0) 00:20:25 executing program 0: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:25 executing program 4: openat$iommufd(0xffffffffffffff9c, 0x0, 0x0, 0x0) 00:20:25 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, 
@private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = socket$inet_dccp(0x2, 0x6, 0x0) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000000)) 00:20:25 executing program 3: syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) 00:20:25 executing program 1: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:25 executing program 0: openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) 00:20:25 executing program 2: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:25 executing program 3: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) r1 = socket$inet_dccp(0x2, 0x6, 0x0) ioctl$BTRFS_IOC_START_SYNC(r1, 0x80089418, &(0x7f0000000000)) 00:20:25 executing program 4: syz_open_dev$dri(&(0x7f0000000000), 0x0, 0x0) 00:20:25 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:25 executing program 0: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:25 executing program 3: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, 0x0) 00:20:25 executing program 4: accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) r0 = socket$inet_dccp(0x2, 0x6, 0x0) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:20:25 executing 
program 0: syz_open_dev$dri(0x0, 0x0, 0x0) 00:20:25 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) socket$inet_dccp(0x2, 0x6, 0x0) 00:20:25 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:25 executing program 1: r0 = socket$inet_dccp(0x2, 0x6, 0x0) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, &(0x7f0000000000)) 00:20:25 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) socket$inet_dccp(0x2, 0x6, 0x0) 00:20:25 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) [ 1225.274049][ T4083] x_tables: duplicate underflow at hook 1 00:20:25 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:25 executing program 3: socket$inet_dccp(0x2, 0x6, 0x0) ioctl$BTRFS_IOC_START_SYNC(0xffffffffffffffff, 0x80089418, &(0x7f0000000000)) 00:20:25 executing program 4: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, 0x0) 00:20:25 executing program 2: syz_open_dev$dri(0x0, 0x0, 0x0) 00:20:25 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) [ 1225.314949][ T4092] x_tables: duplicate underflow at hook 1 00:20:25 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:25 executing program 2: socket$inet_dccp(0x2, 0x6, 0x0) ioctl$BTRFS_IOC_START_SYNC(0xffffffffffffffff, 
0x80089418, &(0x7f0000000000)) 00:20:25 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) [ 1225.394958][ T4107] x_tables: duplicate underflow at hook 1 00:20:25 executing program 0: syz_open_dev$dri(0x0, 0x0, 0x0) 00:20:25 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:25 executing program 2: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, 0x0) 00:20:25 executing program 1: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:25 executing program 3: socket$inet_dccp(0x2, 0x6, 0x0) ioctl$BTRFS_IOC_START_SYNC(0xffffffffffffffff, 0x80089418, &(0x7f0000000000)) [ 1225.444890][ T4111] x_tables: duplicate underflow at hook 1 00:20:25 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:25 executing program 1: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, 
&(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:25 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) 00:20:25 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:25 executing program 1: r0 = socket$inet_dccp(0x2, 0x6, 0x0) ioctl$BTRFS_IOC_START_SYNC(r0, 0x80089418, 0x0) [ 1225.505435][ T4124] x_tables: duplicate underflow at hook 1 00:20:25 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) socket$inet_dccp(0x2, 0x6, 0x0) [ 1225.511305][ T4129] x_tables: duplicate underflow at hook 1 00:20:25 executing program 4: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000240)={0x0, &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x0, 0x0, 0x7, 0x6}) 00:20:25 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) socket$inet(0x2, 0x3, 0x9) 00:20:25 executing program 3: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:25 executing program 2: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, 
@dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:25 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:25 executing program 0: socket$inet(0x2, 0x3, 0x9) 00:20:25 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:25 executing program 2: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000240)={0x0, 0x0, &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x0, 0x0, 0x7, 0x6}) 00:20:25 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:25 executing program 0: syz_open_dev$dri(0x0, 0x0, 0x0) 00:20:25 executing program 1: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, 0x0) 00:20:25 executing program 1: socket$inet(0x2, 0x0, 0x9) 00:20:25 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:25 executing program 0: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000240)={0x0, 0x0, 0x0, &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x0, 0x0, 0x0, 0x6}) 00:20:25 executing program 3: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, 0x0) 00:20:25 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:25 executing program 3: r0 = openat$nci(0xffffffffffffff9c, 
&(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:25 executing program 1: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:25 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x48]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:25 executing program 0: socket$inet(0x2, 0x0, 0x9) [ 1225.774408][ T6190] Bluetooth: hci1: command 0x040f tx timeout [ 1225.798175][ T4190] x_tables: duplicate underflow at hook 1 00:20:25 executing program 1: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, &(0x7f0000000240)={0x0, 0x0, 0x0, 0x0}) 00:20:25 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:25 executing program 3: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, 0x0) 00:20:25 executing program 2: socket$inet(0x2, 0x0, 0x9) 00:20:25 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:25 executing program 1: socket$inet(0x2, 0x0, 0x9) 00:20:25 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:25 executing program 0: ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, 0x0) 00:20:25 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfdfdffff, &(0x7f0000001440)) 00:20:25 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) 
setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:25 executing program 0: socket$inet(0x2, 0x0, 0x9) 00:20:25 executing program 3: ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, 0x0) [ 1225.980094][ T4225] x_tables: duplicate underflow at hook 1 00:20:25 executing program 1: socket$inet(0x2, 0x0, 0x9) 00:20:26 executing program 2: socket$inet(0x2, 0x3, 0x0) 00:20:26 executing program 3: r0 = socket$inet(0x2, 0x0, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 4: ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, 0x0) 00:20:26 executing program 1: socket$inet(0x2, 0x0, 0x9) 00:20:26 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x4c]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 2: socket$inet(0x2, 0x0, 0x9) 00:20:26 executing program 4: socket$inet(0x2, 0x0, 0x9) [ 1226.112528][ T4248] x_tables: duplicate underflow at hook 1 00:20:26 executing program 0: socket$inet(0x2, 0x3, 0x0) 00:20:26 executing program 3: r0 = openat$drirender128(0xffffffffffffff9c, 0x0, 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, 0x0) 00:20:26 executing program 1: r0 = socket$inet(0x2, 0x0, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 
&(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 2: r0 = openat$drirender128(0xffffffffffffff9c, 0x0, 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, 0x0) 00:20:26 executing program 4: socket$inet(0x2, 0x0, 0x0) 00:20:26 executing program 3: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0], 0x4, 0x4, 0x5, 0x3}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000480)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000500)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f0000000100)=[0x0, 0x0], &(0x7f0000000300), 0x8, 0x2, 0xd, 0x0, r2}) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000000)) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, r1, 0x9, 0x1, 0x8}) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000040)) ioctl$DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE_FD(r0, 0xc01064c2, &(0x7f00000000c0)={0x0}) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r0, 0xc01064c1, &(0x7f0000000180)={r4}) 00:20:26 executing program 2: socket$inet(0x2, 0x0, 0x9) 00:20:26 executing program 0: r0 = openat$drirender128(0xffffffffffffff9c, 0x0, 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r0, 0xc04064a0, 0x0) 00:20:26 executing program 3: socket$inet(0x2, 0x3, 0x0) 00:20:26 executing program 1: socket$inet(0x2, 0x0, 0x9) 00:20:26 executing program 0: r0 = socket$inet(0x2, 0x0, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 4: openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, 0x0) 00:20:26 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, 
@unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 1: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0], 0x4, 0x4, 0x5, 0x3}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000480)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000500)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f0000000100)=[0x0, 0x0], &(0x7f0000000300), 0x8, 0x2, 0xd, 0x0, r2}) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000000)) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, r1, 0x9, 0x1, 0x8}) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000040)) ioctl$DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE_FD(r0, 0xc01064c2, &(0x7f00000000c0)={0x0}) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r0, 0xc01064c1, &(0x7f0000000180)={r4}) 00:20:26 executing program 3: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0], 0x4, 0x4, 0x5, 0x3}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000480)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000500)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f0000000100)=[0x0, 0x0], &(0x7f0000000300), 0x8, 0x2, 0xd, 0x0, r2}) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000000)) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, r1, 0x9, 0x1, 0x8}) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000040)) ioctl$DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE_FD(r0, 0xc01064c2, &(0x7f00000000c0)={0x0}) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r0, 0xc01064c1, &(0x7f0000000180)={r4}) 00:20:26 executing program 4: socket$inet(0x2, 0x0, 0x0) 00:20:26 executing program 2: openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, 0x0) 00:20:26 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, 
'\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 1: r0 = socket$inet(0x2, 0x3, 0x0) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 0: openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, 0x0) 00:20:26 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x6c]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 2: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x60a00, 0x0) (async) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0], 0x4, 0x4, 0x5, 0x3}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000480)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000500)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f0000000100)=[0x0, 0x0], &(0x7f0000000300), 0x8, 0x2, 0xd, 0x0, r2}) (async, rerun: 32) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) (rerun: 32) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000000)) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, r1, 0x9, 0x1, 0x8}) (async) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000040)) ioctl$DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE_FD(r0, 0xc01064c2, &(0x7f00000000c0)={0x0}) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r0, 0xc01064c1, &(0x7f0000000180)={r4}) 00:20:26 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 
0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 3: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0], 0x4, 0x4, 0x5, 0x3}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000480)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000500)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f0000000100)=[0x0, 0x0], &(0x7f0000000300), 0x8, 0x2, 0xd, 0x0, r2}) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000000)) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, r1, 0x9, 0x1, 0x8}) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000040)) ioctl$DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE_FD(r0, 0xc01064c2, &(0x7f00000000c0)={0x0}) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r0, 0xc01064c1, &(0x7f0000000180)={r4}) 00:20:26 executing program 0: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x74]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 4: r0 = accept4$inet(0xffffffffffffffff, 0x0, &(0x7f0000000080), 0x0) setsockopt$inet_msfilter(r0, 0x0, 0x29, &(0x7f00000000c0)={@dev={0xac, 0x14, 0x14, 0x2c}, @private=0xa010102, 0x0, 0x3, [@rand_addr=0x64010101, @local, @dev={0xac, 0x14, 0x14, 0x15}]}, 0x1c) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000040)) (async) r1 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000100), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(r1, 0xc04064a0, &(0x7f0000000240)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000180), &(0x7f00000001c0)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000200)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0], 0x4, 0x0, 0x7, 0x6}) r2 = socket$inet_dccp(0x2, 0x6, 0x0) setsockopt$inet_mtu(r2, 0x0, 0xa, &(0x7f0000000000), 0x4) (async) ioctl$BTRFS_IOC_START_SYNC(r2, 0x80089418, &(0x7f0000000000)) 00:20:26 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 0: r0 = socket$inet(0x2, 0x3, 0x0) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 
0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x7a]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfffffdfd, &(0x7f0000001440)) 00:20:26 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 4: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0], 0x4, 0x4, 0x5, 0x3}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000480)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000500)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f0000000100)=[0x0, 0x0], &(0x7f0000000300), 0x8, 0x2, 0xd, 0x0, r2}) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000000)) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, r1, 0x9, 0x1, 0x8}) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000040)) ioctl$DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE_FD(r0, 0xc01064c2, &(0x7f00000000c0)={0x0}) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r0, 0xc01064c1, &(0x7f0000000180)={r4}) 00:20:26 executing program 3: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0], 0x4, 0x4, 0x5, 0x3}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, 
&(0x7f0000000480)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000500)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f0000000100)=[0x0, 0x0], &(0x7f0000000300), 0x8, 0x2, 0xd, 0x0, r2}) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000000)) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, r1, 0x9, 0x1, 0x8}) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000040)) ioctl$DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE_FD(r0, 0xc01064c2, &(0x7f00000000c0)={0x0}) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r0, 0xc01064c1, &(0x7f0000000180)={r4}) 00:20:26 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0xfffffdfd, &(0x7f0000001440)) 00:20:26 executing program 1: r0 = socket$inet(0x2, 0x3, 0x0) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 1: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0], 0x4, 0x4, 0x5, 0x3}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000480)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000500)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f0000000100)=[0x0, 0x0], &(0x7f0000000300), 0x8, 0x2, 0xd, 0x0, r2}) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000000)) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 
0xc01864b0, &(0x7f00000001c0)={0x0, r1, 0x9, 0x1, 0x8}) 00:20:26 executing program 3: socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:26 executing program 4: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0], 0x4, 0x4, 0x5, 0x3}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000480)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000500)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f0000000100)=[0x0, 0x0], &(0x7f0000000300), 0x8, 0x2, 0xd, 0x0, r2}) r3 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r3, 0x0, &(0x7f0000000000)) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, r1, 0x9, 0x1, 0x8}) ioctl$BTRFS_IOC_START_SYNC(r3, 0x80089418, &(0x7f0000000040)) ioctl$DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE_FD(r0, 0xc01064c2, &(0x7f00000000c0)={0x0}) ioctl$DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD_FD(r0, 0xc01064c1, &(0x7f0000000180)={r4}) 00:20:26 executing program 0: r0 = socket$inet(0x2, 0x0, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 3: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0xe8]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 1: socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 
0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 3: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0], 0x4, 0x4, 0x5, 0x3}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000480)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000500)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f0000000100)=[0x0, 0x0], &(0x7f0000000300), 0x8, 0x2, 0xd, 0x0, r2}) openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, r1, 0x9, 0x1, 0x8}) 00:20:27 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:27 executing program 1: r0 = socket$inet(0x2, 0x0, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 4: socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:27 executing program 0: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0, 0x0, 0x0], 
&(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0], 0x4, 0x4, 0x5, 0x3}) ioctl$DRM_IOCTL_MODE_GETCONNECTOR(r0, 0xc05064a7, &(0x7f0000000480)={&(0x7f0000000140)=[0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000500)=[{}, {}, {}, {}, {}, {}, {}, {}], &(0x7f0000000100)=[0x0, 0x0], &(0x7f0000000300), 0x8, 0x2, 0xd, 0x0, r2}) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, r1, 0x9, 0x1, 0x8}) 00:20:27 executing program 1: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:27 executing program 4: r0 = socket$inet(0x2, 0x0, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 3: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:27 executing program 2: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:27 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:27 executing program 4: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_GETRESOURCES(0xffffffffffffffff, 0xc04064a0, &(0x7f0000000440)={&(0x7f0000000340)=[0x0, 0x0, 0x0, 0x0], &(0x7f0000000380)=[0x0, 0x0, 0x0, 0x0], &(0x7f00000003c0)=[0x0, 0x0, 0x0, 0x0, 0x0], &(0x7f0000000400)=[0x0, 0x0, 0x0], 0x4, 0x4, 0x5, 0x3}) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, r1, 0x9, 0x1, 0x8}) 00:20:27 executing program 1: r0 = socket$inet(0x2, 0x3, 0x0) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:27 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:27 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) 
setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:27 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:27 executing program 4: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, 0x0, 0x9, 0x1, 0x8}) 00:20:27 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 0: r0 = socket$inet(0x2, 0x3, 0x0) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:27 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 2: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x0, 0x2]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:27 executing program 2: ioctl$DRM_IOCTL_MODE_PAGE_FLIP(0xffffffffffffffff, 0xc01864b0, &(0x7f00000001c0)={0x0, 0x0, 0x9, 0x1, 0x8}) 00:20:27 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:27 executing program 3: r0 = socket$inet(0x2, 
0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:27 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 1: r0 = socket$inet(0x2, 0x3, 0x0) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 3: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:27 executing program 2: setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, 0x0, 0x0) 00:20:27 executing program 4: ioctl$DRM_IOCTL_MODE_PAGE_FLIP(0xffffffffffffffff, 0xc01864b0, &(0x7f00000001c0)={0x0, 0x0, 0x9, 0x1, 0x8}) [ 1227.854134][ T6190] Bluetooth: hci1: command 0x0419 tx timeout 00:20:27 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) 00:20:27 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:27 executing program 2: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:27 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 3: socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 
0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 1: ioctl$DRM_IOCTL_MODE_PAGE_FLIP(0xffffffffffffffff, 0xc01864b0, &(0x7f00000001c0)={0x0, 0x0, 0x9, 0x1, 0x8}) 00:20:27 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 3: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000080)) 00:20:27 executing program 2: socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 2: setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, 0x0, 0x0) 00:20:27 executing program 1: r0 = socket$inet(0x2, 0x3, 0x0) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 2: socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:27 executing program 4: r0 = openat$drirender128(0xffffffffffffff9c, 0x0, 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, 0x0, 0x9, 0x1, 0x8}) 00:20:27 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) 
ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:28 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:28 executing program 2: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000080)) 00:20:28 executing program 4: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:28 executing program 0: r0 = openat$drirender128(0xffffffffffffff9c, 0x0, 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, 0x0, 0x9, 0x1, 0x8}) 00:20:28 executing program 3: setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, 0x0, 0x0) 00:20:28 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:28 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) 00:20:28 executing program 1: socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(0xffffffffffffffff, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x68]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:28 executing program 4: r0 = openat$drirender128(0xffffffffffffff9c, 0x0, 0x60a00, 0x0) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, 0x0, 0x9, 0x1, 0x8}) 00:20:28 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:28 executing program 3: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, 
@unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:28 executing program 1: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000080)) 00:20:28 executing program 4: r0 = socket$inet(0x2, 0x0, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:28 executing program 2: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, &(0x7f00000001c0)={0x0, 0x0, 0x9, 0x1, 0x8}) 00:20:28 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:28 executing program 1: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:28 executing program 3: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:28 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x25]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:28 executing program 0: openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(0xffffffffffffffff, 0xc01864b0, &(0x7f00000001c0)={0x0, 0x0, 0x9, 0x1, 0x8}) 00:20:28 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:28 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) 00:20:28 executing program 3: r0 = socket$inet(0x2, 0x0, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:28 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000080)) 00:20:28 executing program 2: openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(0xffffffffffffffff, 0xc01864b0, &(0x7f00000001c0)={0x0, 0x0, 0x9, 0x1, 0x8}) 00:20:28 executing program 1: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, 
{{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:28 executing program 4: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000080)) 00:20:28 executing program 1: r0 = openat$nci(0xffffffffffffff9c, 0x0, 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) 00:20:28 executing program 2: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:28 executing program 0: openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(0xffffffffffffffff, 0xc01864b0, &(0x7f00000001c0)={0x0, 0x0, 0x9, 0x1, 0x8}) 00:20:28 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) 00:20:28 executing program 3: r0 = openat$nci(0xffffffffffffff9c, 0x0, 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) 00:20:28 executing program 2: r0 = socket$inet(0x2, 0x0, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:28 executing program 0: r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:28 executing program 4: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x0, 0x3]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:28 executing program 3: r0 = openat$drirender128(0xffffffffffffff9c, &(0x7f0000000080), 0x0, 0x0) ioctl$DRM_IOCTL_MODE_PAGE_FLIP(r0, 0xc01864b0, 0x0) 00:20:28 executing program 2: r0 = openat$nci(0xffffffffffffff9c, 0x0, 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) 00:20:28 executing program 1: r0 = socket$inet(0x2, 0x3, 0x0) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:28 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000080)) 00:20:28 executing program 3: r0 
= socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:28 executing program 4: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) 00:20:28 executing program 0: openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(0xffffffffffffffff, 0x0, &(0x7f0000000080)) 00:20:28 executing program 1: getsockopt$EBT_SO_GET_INIT_ENTRIES(0xffffffffffffffff, 0x0, 0x83, 0x0, 0x0) r0 = socket$inet(0x2, 0x3, 0x9) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, &(0x7f0000000cc0)={'filter\x00', 0x7, 0x4, 0x3a0, 0x1d0, 0x0, 0xe8, 0x2b8, 0x2b8, 0x2b8, 0x4, 0x0, {[{{@uncond, 0xc0, 0xe8}, @unspec=@NFQUEUE1={0x28}}, {{@uncond, 0xc0, 0xe8}, @unspec=@STANDARD={0x28, '\x00', 0x0, 0xfffffffffffffffe}}, {{@arp={@initdev={0xac, 0x1e, 0x0, 0x0}, @multicast2, 0x0, 0x0, 0x0, 0x0, {@empty, {[0x0, 0x0, 0x0, 0x0, 0x0, 0x4]}}, {@mac=@link_local}, 0x0, 0x0, 0x0, 0x0, 0x4, 0x0, 'dvmrp1\x00', 'gretap0\x00'}, 0xc0, 0xe8}, @unspec=@CLASSIFY={0x28}}], {{'\x00', 0xc0, 0xe8}, {0x28}}}}, 0x3f0) 00:20:28 executing program 3: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$zero(0xffffffffffffff9c, &(0x7f0000000000), 0x200100, 0x0) read$usbfs(r1, 0x0, 0xffffffffffffffa7) setsockopt$inet_mtu(r1, 0x0, 0xa, &(0x7f0000000040)=0x5, 0x4) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) r2 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) ioctl$LOOP_CTL_ADD(r2, 0x4c80, 0xa) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f0000000080)) 00:20:28 executing program 4: r0 = socket$inet(0x2, 0x3, 0x0) setsockopt$ARPT_SO_SET_REPLACE(r0, 0x0, 0x60, 0x0, 0x0) 00:20:28 executing program 3: openat$nci(0xffffffffffffff9c, &(0x7f0000000000), 0x2, 0x0) r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000080)) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) (async) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f00000000c0)) 00:20:28 executing program 0: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$zero(0xffffffffffffff9c, &(0x7f0000000000), 0x200100, 0x0) read$usbfs(r1, 0x0, 0xffffffffffffffa7) setsockopt$inet_mtu(r1, 0x0, 0xa, &(0x7f0000000040)=0x5, 0x4) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) r2 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) ioctl$LOOP_CTL_ADD(r2, 0x4c80, 0xa) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f0000000080)) 00:20:28 executing program 1: r0 = openat$nci(0xffffffffffffff9c, &(0x7f0000001400), 0x2, 0x0) r1 = openat$zero(0xffffffffffffff9c, &(0x7f0000000000), 0x200100, 0x0) read$usbfs(r1, 0x0, 0xffffffffffffffa7) setsockopt$inet_mtu(r1, 0x0, 0xa, &(0x7f0000000040)=0x5, 0x4) ioctl$IOCTL_GET_NCIDEV_IDX(r0, 0x0, &(0x7f0000000000)) r2 = openat$proc_capi20ncci(0xffffffffffffff9c, &(0x7f0000001180), 0x0, 0x0) ioctl$LOOP_CTL_ADD(r2, 0x4c80, 0xa) ioctl$IOCTL_GET_NCIDEV_IDX(r2, 0x0, &(0x7f0000000080)) 00:20:28 executing program 3: openat$nci(0xffffffffffffff9c, &(0x7f0000000040), 0x2, 0x0) ioctl$IOCT VM 
DIAGNOSIS: Warning: Permanently added '10.128.1.53' (ED25519) to the list of known hosts.
lock-classes: 8178 [max: 8192]
direct dependencies: 30688 [max: 131072]
indirect dependencies: 164354
all direct dependencies: 2567643
dependency chains: 52168 [max: 65536]
dependency chain hlocks used: 205834 [max: 327680]
dependency chain hlocks lost: 0
in-hardirq chains: 130
in-softirq chains: 1588
in-process chains: 50450
stack-trace entries: 291640 [max: 1048576]
number of stack traces: 13076
number of stack hash chains: 8991
combined max dependencies:
hardirq-safe locks: 60
hardirq-unsafe locks: 7573
softirq-safe locks: 258
softirq-unsafe locks: 7293
irq-safe locks: 275
irq-unsafe locks: 7573
hardirq-read-safe locks: 4
hardirq-read-unsafe locks: 166
softirq-read-safe locks: 18
softirq-read-unsafe locks: 154
irq-read-safe locks: 18
irq-read-unsafe locks: 166
uncategorized locks: 330
unused locks: 2
max locking depth: 21
max bfs queue depth: 1474
max lock class index: 8191
debug_locks: 0
zapped classes: 17154
zapped lock chains: 140910
large chain blocks: 1
all lock classes:
FD: 1 BD: 179 -.-.: (console_sem).lock
FD: 239 BD: 5 +.+.: console_lock ->pool_lock#2 ->&obj_hash[i].lock ->&____s->seqcount ->&c->lock ->kbd_event_lock ->(console_sem).lock ->console_owner_lock ->fs_reclaim ->&x->wait#9 ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#11 ->&fb_info->lock ->vt_event_lock ->&base->lock ->subsys mutex#6 ->&helper->lock ->&helper->damage_lock ->&rq->__lock ->&lock->wait_lock ->&p->pi_lock
FD: 1 BD: 1 ....: console_srcu
FD: 32 BD: 1 +.+.: fill_pool_map-wait-type-override ->pool_lock#2 ->&____s->seqcount ->pool_lock ->&c->lock ->&obj_hash[i].lock ->&rq->__lock ->&cfs_rq->removed.lock ->&n->list_lock ->&____s->seqcount#2 ->rcu_node_0 ->&rcu_state.expedited_wq ->batched_entropy_u8.lock ->kfence_freelist_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock
FD: 2 BD: 3175 -.-.: &obj_hash[i].lock ->pool_lock
FD: 1 BD: 3175 -.-.: pool_lock
FD: 710 BD: 15 +.+.: cgroup_mutex ->pcpu_alloc_mutex ->&c->lock ->&____s->seqcount ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&obj_hash[i].lock ->cgroup_file_kn_lock ->css_set_lock ->blkcg_pol_mutex ->percpu_counters_lock ->shrinker_mutex ->&base->lock ->devcgroup_mutex ->cpu_hotplug_lock ->fs_reclaim ->&n->list_lock ->&x->wait#3 ->&rq->__lock ->cgroup_mutex.wait_lock ->cgroup_rstat_lock ->cpuset_mutex ->&dom->lock ->batched_entropy_u32.lock ->cgroup_idr_lock ->task_group_lock ->(wq_completion)cpuset_migrate_mm ->&wq->mutex ->&____s->seqcount#2 ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock
FD: 49 BD: 1 +.+.: fixmap_lock ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2
FD: 225 BD: 88 ++++: cpu_hotplug_lock ->jump_label_mutex ->cpuhp_state_mutex ->wq_pool_mutex ->freezer_mutex ->rcu_tasks_trace__percpu.cbs_pcpu_lock ->&ACCESS_PRIVATE(rtpcp, lock) ->smpboot_threads_lock ->&obj_hash[i].lock ->&pool->lock ->&x->wait#4 ->&rq->__lock ->mem_hotplug_lock ->mem_hotplug_lock.waiters.lock ->mem_hotplug_lock.rss.gp_wait.lock ->cpu_hotplug_lock.rss.gp_wait.lock ->rcu_node_0 ->cpu_hotplug_lock.waiters.lock ->&swhash->hlist_mutex ->pmus_lock ->pcp_batch_high_lock ->&xa->xa_lock ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount ->kthread_create_lock ->&p->pi_lock ->&x->wait ->hrtimer_bases.lock ->wq_pool_attach_mutex ->pcpu_alloc_mutex ->sparse_irq_lock ->&x->wait#6 ->cpuhp_state-up ->stop_cpus_mutex ->&wq->mutex
->flush_lock ->xps_map_mutex ->css_set_lock ->cpuset_mutex ->cgroup_threadgroup_rwsem ->cgroup_threadgroup_rwsem.waiters.lock ->cgroup_threadgroup_rwsem.rss.gp_wait.lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&list->lock#12 ->wq_pool_mutex.wait_lock ->(work_completion)(flush) ->&x->wait#10 FD: 26 BD: 93 +.+.: jump_label_mutex ->patch_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 98 BD: 12 +.+.: console_mutex ->&port_lock_key ->syslog_lock ->(console_sem).lock ->&rq->__lock ->&root->kernfs_rwsem ->kernfs_notify_lock FD: 1 BD: 110 ..-.: input_pool.lock FD: 1 BD: 3165 ..-.: base_crng.lock FD: 1 BD: 94 ....: patch_lock FD: 1 BD: 1 ....: rcu_read_lock FD: 1 BD: 1 ....: crng_init_wait.lock FD: 1 BD: 1 ....: early_pfn_lock FD: 1 BD: 8 ....: devtree_lock FD: 1 BD: 1 ....: rcu_read_lock_sched FD: 10 BD: 93 ++++: resource_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount FD: 1 BD: 1 ....: restart_handler_list.lock FD: 1 BD: 1 +.+.: system_transition_mutex FD: 3 BD: 482 ..-.: pcpu_lock ->stock_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 1 BD: 1 ....: debug_hook_lock FD: 2 BD: 1 ....: zonelist_update_seq ->zonelist_update_seq.seqcount FD: 1 BD: 2 ....: zonelist_update_seq.seqcount FD: 163 BD: 89 +.+.: cpuhp_state_mutex ->cpuhp_state-down ->cpuhp_state-up ->resource_lock ->pool_lock#2 ->(console_sem).lock ->clockevents_lock ->&irq_desc_lock_class ->&p->pi_lock ->&x->wait#6 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->fs_reclaim ->&c->lock ->&____s->seqcount ->lock ->&root->kernfs_rwsem ->crypto_alg_sem ->scomp_lock FD: 2 BD: 3076 ..-.: &zone->lock ->&____s->seqcount FD: 1 BD: 3142 .-.-: &____s->seqcount FD: 26 BD: 95 +.+.: &pcp->lock ->&zone->lock FD: 1 BD: 3204 -.-.: pool_lock#2 FD: 57 BD: 162 +.+.: pcpu_alloc_mutex ->pcpu_lock ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&c->lock ->&____s->seqcount ->init_mm.page_table_lock ->&obj_hash[i].lock ->&rq->__lock ->&cfs_rq->removed.lock ->&____s->seqcount#2 ->pcpu_alloc_mutex.wait_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->key ->percpu_counters_lock ->&base->lock FD: 6 BD: 3132 -.-.: &n->list_lock ->&c->lock FD: 5 BD: 3156 -.-.: &c->lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 103 BD: 61 +.+.: slab_mutex ->pool_lock#2 ->pcpu_alloc_mutex ->&c->lock ->&____s->seqcount ->fs_reclaim ->batched_entropy_u8.lock ->kfence_freelist_lock ->&rq->__lock ->lock ->&root->kernfs_rwsem ->&k->list_lock ->&obj_hash[i].lock ->&n->list_lock FD: 65 BD: 3 +.+.: trace_types_lock ->fs_reclaim ->pool_lock#2 ->pin_fs_lock ->&sb->s_type->i_mutex_key#5 FD: 1 BD: 2 ....: panic_notifier_list.lock FD: 1 BD: 1 ....: die_chain.lock FD: 51 BD: 4 +.+.: trace_event_sem ->trace_event_ida.xa_lock ->&rq->__lock ->fs_reclaim ->batched_entropy_u8.lock ->kfence_freelist_lock ->&c->lock ->&____s->seqcount ->pool_lock#2 ->eventfs_mutex FD: 3 BD: 224 ..-.: batched_entropy_u32.lock ->crngs.lock FD: 2 BD: 3164 ..-.: crngs.lock ->base_crng.lock FD: 17 BD: 387 +.+.: sysctl_lock ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock FD: 24 BD: 3033 -.-.: &rq->__lock ->&per_cpu_ptr(group->pcpu, cpu)->seq ->&obj_hash[i].lock ->&base->lock ->&cfs_rq->removed.lock ->&rt_b->rt_runtime_lock ->&cp->lock ->&rt_rq->rt_runtime_lock ->pool_lock#2 ->cpu_asid_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 1 BD: 3034 ....: &cfs_b->lock FD: 25 BD: 1 ....: init_task.pi_lock ->&rq->__lock FD: 1 BD: 1 ....: init_task.vtime_seqcount FD: 61 BD: 92 +.+.: wq_pool_mutex ->&____s->seqcount 
->&c->lock ->pool_lock#2 ->&wq->mutex ->&obj_hash[i].lock ->fs_reclaim ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->wq_pool_attach_mutex ->&pool->lock ->&xa->xa_lock ->&n->list_lock ->&____s->seqcount#2 ->wq_pool_mutex.wait_lock ->rcu_node_0 ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&rcu_state.expedited_wq ->quarantine_lock ->&cfs_rq->removed.lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 36 BD: 105 +.+.: &wq->mutex ->&pool->lock ->&x->wait#10 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 29 BD: 528 -.-.: &pool->lock ->&obj_hash[i].lock ->&p->pi_lock ->pool_lock#2 ->(worker)->lock ->wq_mayday_lock ->&base->lock ->&x->wait#10 FD: 49 BD: 57 +.+.: shrinker_mutex ->pool_lock#2 ->fs_reclaim FD: 1 BD: 622 -.-.: rcu_node_0 FD: 13 BD: 50 -.-.: rcu_state.barrier_lock ->rcu_node_0 ->&obj_hash[i].lock FD: 30 BD: 3 ....: &rnp->exp_poll_lock FD: 9 BD: 5 ....: trace_event_ida.xa_lock ->&____s->seqcount ->&c->lock ->pool_lock#2 FD: 1 BD: 1 ....: trigger_cmd_mutex FD: 17 BD: 231 +.+.: free_vmap_area_lock ->&obj_hash[i].lock ->pool_lock#2 ->init_mm.page_table_lock ->quarantine_lock ->&meta->lock ->kfence_freelist_lock ->&____s->seqcount ->&base->lock FD: 1 BD: 235 +.+.: vmap_area_lock FD: 243 BD: 1 ....: acpi_probe_mutex ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&zone->lock ->&____s->seqcount ->init_mm.page_table_lock ->resource_lock ->&c->lock ->cpu_hotplug_lock ->(console_sem).lock ->irq_domain_mutex ->pcpu_alloc_mutex ->&domain->mutex ->&desc->request_mutex ->&irq_desc_lock_class ->cpu_pm_notifier.lock ->&obj_hash[i].lock ->purge_vmap_area_lock ->iort_msi_chip_lock ->its_lock ->efi_mem_reserve_persistent_lock ->lpi_range_lock ->syscore_ops_lock ->clocksource_mutex FD: 14 BD: 232 +.+.: init_mm.page_table_lock ->&obj_hash[i].lock ->&base->lock FD: 49 BD: 5 +.+.: irq_domain_mutex ->pool_lock#2 ->fs_reclaim FD: 119 BD: 7 +.+.: &domain->mutex ->sparse_irq_lock ->pool_lock#2 ->&irq_desc_lock_class ->fs_reclaim ->&obj_hash[i].lock ->&its->dev_alloc_lock FD: 116 BD: 95 +.+.: sparse_irq_lock ->&____s->seqcount ->pool_lock#2 ->pcpu_alloc_mutex ->&obj_hash[i].lock ->&c->lock ->(cpu_running).wait.lock ->&base->lock ->&rq->__lock ->(&timer.timer) ->&x->wait#6 ->&p->pi_lock ->&irq_desc_lock_class ->fs_reclaim ->lock ->&root->kernfs_rwsem ->proc_subdir_lock ->&ent->pde_unload_lock ->proc_inum_ida.xa_lock ->sysfs_symlink_target_lock ->kernfs_idr_lock FD: 7 BD: 107 -.-.: &irq_desc_lock_class ->irq_controller_lock ->mask_lock ->&its->lock ->irq_resend_lock ->tmp_mask_lock FD: 19 BD: 16 +.+.: &desc->request_mutex ->&irq_desc_lock_class ->proc_subdir_lock ->&ent->pde_unload_lock ->proc_inum_ida.xa_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 2 ....: cpu_pm_notifier.lock FD: 16 BD: 36 +.+.: purge_vmap_area_lock ->&obj_hash[i].lock ->pool_lock#2 ->quarantine_lock ->&meta->lock ->kfence_freelist_lock ->&____s->seqcount ->&base->lock FD: 1 BD: 3 +.+.: iort_msi_chip_lock FD: 2 BD: 2 ....: its_lock ->&its->lock FD: 1 BD: 2 ....: efi_mem_reserve_persistent_lock FD: 4 BD: 9 +.+.: lpi_range_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 2 +.+.: syscore_ops_lock FD: 1 BD: 112 ....: &its->lock FD: 1 BD: 90 +.+.: cpuhp_state-down FD: 153 BD: 90 +.+.: cpuhp_state-up ->smpboot_threads_lock ->sparse_irq_lock ->&swhash->hlist_mutex ->pmus_lock ->&x->wait#5 ->&obj_hash[i].lock ->hrtimer_bases.lock ->wq_pool_mutex 
->rcu_node_0 ->resource_lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&rq->__lock ->fs_reclaim ->lock ->&root->kernfs_rwsem ->&x->wait#9 ->&k->list_lock ->bus_type_sem ->&k->k_lock ->&pcp->lock ->swap_slots_cache_mutex FD: 4 BD: 1 -.-.: timekeeper_lock ->tk_core.seq.seqcount FD: 3 BD: 3070 ----: tk_core.seq.seqcount ->&obj_hash[i].lock FD: 1 BD: 108 ....: irq_controller_lock FD: 7 BD: 90 ....: clockevents_lock ->tk_core.seq.seqcount ->tick_broadcast_lock ->jiffies_seq.seqcount FD: 3 BD: 91 -...: tick_broadcast_lock ->jiffies_lock FD: 1 BD: 93 -.-.: jiffies_seq.seqcount FD: 231 BD: 2 +.+.: clocksource_mutex ->cpu_hotplug_lock ->(console_sem).lock FD: 12 BD: 3093 -.-.: &base->lock ->&obj_hash[i].lock FD: 3 BD: 5 ....: batched_entropy_u64.lock ->crngs.lock FD: 123 BD: 91 +.+.: pmus_lock ->pcpu_alloc_mutex ->pool_lock#2 ->&obj_hash[i].lock ->&cpuctx_mutex ->fs_reclaim ->&k->list_lock ->lock ->&root->kernfs_rwsem ->uevent_sock_mutex ->running_helpers_waitq.lock ->&c->lock ->&____s->seqcount ->&x->wait#9 ->bus_type_sem ->sysfs_symlink_target_lock ->&k->k_lock ->&dev->power.lock ->dpm_list_mtx ->subsys mutex#29 FD: 1 BD: 91 +.+.: &swhash->hlist_mutex FD: 1 BD: 92 +.+.: &cpuctx_mutex FD: 1 BD: 7 ....: tty_ldiscs_lock FD: 2 BD: 6 ....: kbd_event_lock ->led_lock FD: 1 BD: 7 ..-.: led_lock FD: 1 BD: 151 -...: console_owner_lock FD: 39 BD: 3 +.+.: init_task.alloc_lock ->init_fs.lock FD: 52 BD: 2 +.+.: acpi_ioremap_lock ->pool_lock#2 ->fs_reclaim ->&____s->seqcount ->free_vmap_area_lock ->vmap_area_lock FD: 1 BD: 17 ....: semaphore->lock FD: 1 BD: 16 +.+.: *(&acpi_gbl_reference_count_lock) FD: 14 BD: 3056 -.-.: hrtimer_bases.lock ->tk_core.seq.seqcount ->&obj_hash[i].lock FD: 1 BD: 462 ..-.: percpu_counters_lock FD: 34 BD: 2 +.+.: tomoyo_policy_lock ->pool_lock#2 ->mmu_notifier_invalidate_range_start ->&c->lock ->&____s->seqcount ->&n->list_lock ->&rq->__lock FD: 860 BD: 4 ++++: pernet_ops_rwsem ->stack_depot_init_mutex ->crngs.lock ->net_rwsem ->proc_inum_ida.xa_lock ->pool_lock#2 ->proc_subdir_lock ->fs_reclaim ->&____s->seqcount ->&c->lock ->sysctl_lock ->pcpu_alloc_mutex ->net_generic_ids.xa_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#8 ->&dir->lock ->&obj_hash[i].lock ->k-sk_lock-AF_NETLINK ->k-slock-AF_NETLINK ->nl_table_lock ->nl_table_wait.lock ->rtnl_mutex ->uevent_sock_mutex ->&zone->lock ->&net->rules_mod_lock ->slab_mutex ->batched_entropy_u32.lock ->percpu_counters_lock ->k-slock-AF_INET/1 ->cache_list_lock ->tk_core.seq.seqcount ->&k->list_lock ->lock ->&root->kernfs_rwsem ->pool_lock ->running_helpers_waitq.lock ->&sn->pipefs_sb_lock ->krc.lock ->&rq->__lock ->&s->s_inode_list_lock ->nf_hook_mutex ->cpu_hotplug_lock ->hwsim_netgroup_ida.xa_lock ->nf_ct_ecache_mutex ->nf_log_mutex ->ipvs->est_mutex ->&base->lock ->__ip_vs_app_mutex ->&hashinfo->lock#2 ->&net->ipv6.ip6addrlbl_table.lock ->(console_sem).lock ->k-sk_lock-AF_INET6 ->k-slock-AF_INET6 ->k-clock-AF_INET6 ->wq_pool_mutex ->pcpu_lock ->&list->lock#4 ->&dir->lock#2 ->ptype_lock ->k-clock-AF_TIPC ->k-sk_lock-AF_TIPC ->k-slock-AF_TIPC ->&this->receive_lock ->once_lock ->nf_ct_proto_mutex ->k-sk_lock-AF_RXRPC ->k-slock-AF_RXRPC ->&rxnet->conn_lock ->&call->waitq ->&rx->call_lock ->&rxnet->call_lock ->&n->list_lock ->rdma_nets.xa_lock ->devices_rwsem ->rtnl_mutex.wait_lock ->&p->pi_lock ->remove_cache_srcu ->uevent_sock_mutex.wait_lock ->&net->nsid_lock ->ebt_mutex ->nf_nat_proto_mutex ->&xt[i].mutex ->&nft_net->commit_mutex ->&____s->seqcount#2 ->rcu_node_0 ->&rcu_state.expedited_wq ->netns_bpf_mutex 
->&x->wait#3 ->(&net->fs_probe_timer) ->&net->cells_lock ->(&net->cells_timer) ->bit_wait_table + i ->(&net->fs_timer) ->(wq_completion)kafsd ->&wq->mutex ->k-clock-AF_RXRPC ->&local->services_lock ->(wq_completion)krxrpcd ->rlock-AF_RXRPC ->&x->wait ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->&ent->pde_unload_lock ->ovs_mutex ->(work_completion)(&(&ovs_net->masks_rebalance)->work) ->(work_completion)(&ovs_net->dp_notify_work) ->&srv->idr_lock ->&rnp->exp_wq[2] ->(work_completion)(&tn->work) ->&tn->nametbl_lock ->&rnp->exp_wq[1] ->rcu_state.exp_mutex.wait_lock ->&rnp->exp_lock ->(work_completion)(&ht->run_work) ->&ht->mutex ->(work_completion)(&(&c->work)->work) ->(wq_completion)krdsd ->(work_completion)(&rtn->rds_tcp_accept_w) ->rds_tcp_conn_lock ->loop_conns_lock ->(wq_completion)l2tp ->rcu_state.barrier_mutex ->(&rxnet->peer_keepalive_timer) ->(work_completion)(&rxnet->peer_keepalive_work) ->(&rxnet->service_conn_reap_timer) ->&x->wait#10 ->dev_base_lock ->lweventlist_lock ->napi_hash_lock ->quarantine_lock ->netdev_unregistering_wq.lock ->&fn->fou_lock ->ipvs->sync_mutex ->hwsim_radio_lock ->pin_fs_lock ->&dentry->d_lock ->&sb->s_type->i_mutex_key#3 ->&sb->s_type->i_lock_key#7 ->mount_lock ->(inetaddr_chain).rwsem ->inet6addr_chain.lock ->(work_completion)(&local->restart_work) ->&list->lock#16 ->&rdev->wiphy.mtx ->(work_completion)(&rfkill->uevent_work) ->(work_completion)(&rfkill->sync_work) ->dev_pm_qos_sysfs_mtx ->kernfs_idr_lock ->&k->k_lock ->sysfs_symlink_target_lock ->subsys mutex#40 ->&x->wait#9 ->dpm_list_mtx ->&dev->power.lock ->deferred_probe_mutex ->device_links_lock ->&rfkill->lock ->rfkill_global_mutex ->rfkill_global_mutex.wait_lock ->triggers_list_lock ->leds_list_lock ->(work_completion)(&rdev->wiphy_work) ->(work_completion)(&rdev->conn_work) ->(work_completion)(&rdev->event_work) ->(work_completion)(&(&rdev->dfs_update_channels_wk)->work) ->(work_completion)(&(&rdev->background_cac_done_wk)->work) ->(work_completion)(&rdev->destroy_work) ->(work_completion)(&rdev->propagate_radar_detect_wk) ->(work_completion)(&rdev->propagate_cac_done_wk) ->(work_completion)(&rdev->mgmt_registrations_update_wk) ->(work_completion)(&rdev->background_cac_abort_wk) ->subsys mutex#53 ->gdp_mutex ->(&local->sta_cleanup) ->&cfs_rq->removed.lock ->rdma_nets_rwsem ->k-clock-AF_NETLINK ->&nlk->wait ->&wg->device_update_lock ->&bat_priv->forw_bcast_list_lock ->&bat_priv->forw_bat_list_lock ->&bat_priv->gw.list_lock ->(work_completion)(&(&bat_priv->bat_v.ogm_wq)->work) ->&bat_priv->bat_v.ogm_buff_mutex ->&bat_priv->tvlv.container_list_lock ->&bat_priv->tvlv.handler_list_lock ->(work_completion)(&(&bat_priv->nc.work)->work) ->key#17 ->key#18 ->(work_completion)(&(&bat_priv->dat.work)->work) ->&hash->list_locks[i] ->(work_completion)(&(&bat_priv->bla.work)->work) ->key#20 ->(work_completion)(&(&bat_priv->mcast.work)->work) ->(work_completion)(&(&bat_priv->tt.work)->work) ->key#16 ->key#21 ->&bat_priv->tt.req_list_lock ->&bat_priv->tt.changes_list_lock ->&bat_priv->tt.roam_list_lock ->(work_completion)(&(&bat_priv->orig_work)->work) ->key#19 ->wq_mayday_lock ->&hn->hn_lock ->&pnettable->lock ->&pnetids_ndev->lock ->k-sk_lock-AF_INET6/1 ->&net->sctp.addr_wq_lock ->k-sk_lock-AF_INET ->k-slock-AF_INET#2 ->&sn->gssp_lock ->&cd->hash_lock ->(&net->can.stattimer) ->vmap_area_lock ->purge_vmap_area_lock ->stock_lock ->xfrm_state_gc_work ->&net->xfrm.xfrm_state_lock ->&sb->s_type->i_lock_key#23 ->rename_lock.seqcount ->(work_completion)(&(&net->ipv6.addr_chk_work)->work) ->ip6_fl_lock 
->(&net->ipv6.ip6_fib_timer) ->__ip_vs_mutex ->(&ipvs->dest_trash_timer) ->(work_completion)(&(&ipvs->expire_nodest_conn_work)->work) ->(work_completion)(&(&ipvs->defense_work)->work) ->(work_completion)(&(&ipvs->est_reload_work)->work) ->nfnl_subsys_ipset ->recent_lock ->hashlimit_mutex ->trans_gc_work ->nf_conntrack_mutex ->(work_completion)(&(&cnet->ecache.dwork)->work) ->tcp_metrics_lock ->k-clock-AF_INET ->(work_completion)(&net->xfrm.policy_hash_work) ->&net->xfrm.xfrm_policy_lock ->(work_completion)(&net->xfrm.state_hash_work) ->&xa->xa_lock#4 ->genl_sk_destructing_waitq.lock ->&rnp->exp_wq[3] ->rcu_state.barrier_mutex.wait_lock ->&sem->wait_lock ->rcu_state.exp_mutex ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&lock->wait_lock ->pcpu_alloc_mutex.wait_lock ->key ->(wq_completion)bond0#79 ->(wq_completion)tipc_rcv#8 ->(wq_completion)tipc_send#8 ->(wq_completion)tipc_crypto#8 ->(wq_completion)phy164 ->(wq_completion)phy163 ->(wq_completion)bond0#81 ->(wq_completion)tipc_rcv#9 ->(wq_completion)tipc_send#9 ->(wq_completion)tipc_crypto#9 ->(wq_completion)phy162 ->(wq_completion)phy161 ->(wq_completion)bond0#80 ->(wq_completion)tipc_rcv#10 ->(wq_completion)tipc_send#10 ->(wq_completion)tipc_crypto#10 ->(wq_completion)phy136 ->(wq_completion)phy135 FD: 25 BD: 51 +.+.: stack_depot_init_mutex ->&rq->__lock FD: 31 BD: 70 ++++: net_rwsem ->&list->lock#2 ->&rq->__lock ->pool_lock#2 ->&obj_hash[i].lock ->nl_table_lock ->nl_table_wait.lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&n->list_lock ->&cfs_rq->removed.lock ->&base->lock FD: 7 BD: 111 ..-.: proc_inum_ida.xa_lock ->pool_lock#2 ->&c->lock FD: 701 BD: 46 +.+.: rtnl_mutex ->&c->lock ->&zone->lock ->&____s->seqcount ->pool_lock#2 ->fs_reclaim ->pcpu_alloc_mutex ->&xa->xa_lock#4 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#20 ->&dir->lock#2 ->dev_hotplug_mutex ->dev_base_lock ->input_pool.lock ->nl_table_lock ->nl_table_wait.lock ->net_rwsem ->batched_entropy_u32.lock ->&tbl->lock ->sysctl_lock ->krc.lock ->&rq->__lock ->stack_depot_init_mutex ->cpu_hotplug_lock ->kthread_create_lock ->&p->pi_lock ->&x->wait ->wq_pool_mutex ->crngs.lock ->lweventlist_lock ->rtnl_mutex.wait_lock ->proc_subdir_lock ->proc_inum_ida.xa_lock ->&k->k_lock ->param_lock ->(console_sem).lock ->&rdev->wiphy.mtx ->&base->lock ->subsys mutex#55 ->&sdata->sec_mtx ->&local->iflist_mtx#2 ->lock#7 ->failover_lock ->&cfs_rq->removed.lock ->&tn->lock ->&idev->mc_lock ->&ndev->lock ->rcu_node_0 ->&pnettable->lock ->smc_ib_devices.mutex ->&(&net->nexthop.notifier_chain)->rwsem ->reg_requests_lock ->reg_pending_beacons_lock ->devnet_rename_sem ->&x->wait#3 ->&nft_net->commit_mutex ->&ent->pde_unload_lock ->target_list_lock ->rlock-AF_NETLINK ->(inetaddr_validator_chain).rwsem ->(inetaddr_chain).rwsem ->_xmit_LOOPBACK ->netpoll_srcu ->&in_dev->mc_tomb_lock ->&im->lock ->fib_info_lock ->cbs_list_lock ->(inet6addr_validator_chain).rwsem ->&net->ipv6.addrconf_hash_lock ->&ifa->lock ->&tb->tb6_lock ->&n->list_lock ->&dev_addr_list_lock_key ->napi_hash_lock ->lapb_list_lock ->x25_neigh_list_lock ->console_owner_lock ->console_owner ->_xmit_ETHER ->_xmit_SLIP ->free_vmap_area_lock ->vmap_area_lock ->init_mm.page_table_lock ->&cma->lock ->cma_mutex 
->remove_cache_srcu ->&rfkill->lock ->&sem->wait_lock ->_xmit_VOID ->_xmit_X25 ->&lapbeth->up_lock ->&lapb->lock ->class ->(&tbl->proxy_timer) ->quarantine_lock ->&dev->tx_global_lock ->&rnp->exp_wq[2] ->&sch->q.lock ->&rnp->exp_wq[3] ->&dir->lock ->&ul->lock#2 ->&n->lock ->&wpan_dev->association_lock ->dev_addr_sem ->_xmit_IEEE802154 ->&nr_netdev_addr_lock_key ->listen_lock ->pool_lock ->key ->pcpu_lock ->percpu_counters_lock ->&r->consumer_lock ->&mm->mmap_lock ->(switchdev_blocking_notif_chain).rwsem ->&br->hash_lock ->nf_hook_mutex ->j1939_netdev_lock ->&bat_priv->tvlv.handler_list_lock ->&bat_priv->tvlv.container_list_lock ->&bat_priv->softif_vlan_list_lock ->key#16 ->&bat_priv->tt.changes_list_lock ->kernfs_idr_lock ->&rnp->exp_wq[0] ->noop_qdisc.q.lock ->&rnp->exp_wq[1] ->tk_core.seq.seqcount ->&wq->mutex ->init_lock ->hrtimer_bases.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->deferred_lock ->&br->lock ->&pn->hash_lock ->&hard_iface->bat_iv.ogm_buff_mutex ->ptype_lock ->_xmit_NONE ->lock#9 ->&meta->lock ->&hsr->list_lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->mount_lock ->&xa->xa_lock#18 ->&dev_addr_list_lock_key#3/1 ->req_lock ->&x->wait#11 ->subsys mutex#75 ->bpf_devs_lock ->(work_completion)(&(&devlink_port->type_warn_dw)->work) ->&devlink_port->type_lock ->&vn->sock_lock ->&wg->device_update_lock ->&rcu_state.expedited_wq ->_xmit_SIT ->&bridge_netdev_addr_lock_key/1 ->_xmit_TUNNEL ->_xmit_IPGRE ->_xmit_TUNNEL6 ->&dev_addr_list_lock_key/1 ->&dev_addr_list_lock_key#2/1 ->_xmit_ETHER/1 ->&nn->netlink_tap_lock ->&batadv_netdev_addr_lock_key/1 ->&vlan_netdev_addr_lock_key/1 ->&macvlan_netdev_addr_lock_key/1 ->&ipvlan->addrs_lock ->&macsec_netdev_addr_lock_key/1 ->key#20 ->&bat_priv->tt.commit_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#8 ->k-slock-AF_INET/1 ->k-sk_lock-AF_INET ->k-slock-AF_INET#2 ->k-sk_lock-AF_INET6 ->k-slock-AF_INET6 ->&ul->lock ->&____s->seqcount#2 ->&tun->lock ->dev->qdisc_tx_busylock ?: &qdisc_tx_busylock ->__ip_vs_mutex ->flowtable_lock ->&idev->mc_query_lock ->(work_completion)(&(&idev->mc_report_work)->work) ->&hwstats->hwsdev_list_lock ->&net->xdp.lock ->mirred_list_lock ->&idev->mc_report_lock ->&sb->s_type->i_lock_key#23 ->&dentry->d_lock ->rename_lock.seqcount ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->&pnn->pndevs.lock ->&pnn->routes.lock ->dev_pm_qos_sysfs_mtx ->deferred_probe_mutex ->device_links_lock ->&net->xfrm.xfrm_state_lock ->&net->xfrm.xfrm_policy_lock ->&sb->s_type->i_lock_key#7 ->&rnp->exp_lock ->rcu_state.exp_mutex ->netlbl_unlhsh_lock ->nr_list_lock ->nr_neigh_list_lock ->&bpq_netdev_addr_lock_key ->(work_completion)(&wdev->disconnect_wk) ->(work_completion)(&wdev->pmsr_free_wk) ->&rdev->dev_wait ->&fq->lock ->&app->lock ->(&app->join_timer) ->(&app->periodic_timer) ->&list->lock#10 ->(&app->join_timer)#2 ->&app->lock#2 ->&list->lock#11 ->(work_completion)(&(&priv->scan_result)->work) ->(work_completion)(&(&priv->connect)->work) ->(&hsr->prune_timer) ->(&hsr->announce_timer) ->key#19 ->&bat_priv->forw_bcast_list_lock ->&bat_priv->forw_bat_list_lock ->(work_completion)(&(&forw_packet_aggr->delayed_work)->work) ->(&pmctx->ip6_mc_router_timer) ->(&pmctx->ip4_mc_router_timer) ->(work_completion)(&ht->run_work) ->&ht->mutex ->&br->multicast_lock ->(work_completion)(&(&br->gc_work)->work) ->dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 ->&table->hash[i].lock ->k-clock-AF_INET6 ->k-clock-AF_INET ->&r->consumer_lock#2 ->&wg->socket_update_lock 
->(work_completion)(&(&bond->mii_work)->work) ->(work_completion)(&(&bond->arp_work)->work) ->(work_completion)(&(&bond->alb_work)->work) ->(work_completion)(&(&bond->ad_work)->work) ->(work_completion)(&(&bond->mcast_work)->work) ->(work_completion)(&(&bond->slave_arr_work)->work) ->(&br->hello_timer) ->(&br->topology_change_timer) ->(&br->tcn_timer) ->(&brmctx->ip4_mc_router_timer) ->(&brmctx->ip4_other_query.timer) ->(&brmctx->ip4_other_query.delay_timer) ->(&brmctx->ip4_own_query.timer) ->(&brmctx->ip6_mc_router_timer) ->(&brmctx->ip6_other_query.timer) ->(&brmctx->ip6_other_query.delay_timer) ->(&brmctx->ip6_own_query.timer) ->raw_notifier_lock ->bcm_notifier_lock ->isotp_notifier_lock ->(work_completion)(&port->bc_work) ->(work_completion)(&port->wq) ->(work_completion)(&(&slave->notify_work)->work) ->_xmit_NETROM#2 ->(&mp->timer) ->(work_completion)(&br->mcast_gc_work) ->rcu_state.barrier_mutex ->stock_lock ->&caifn->caifdevs.lock ->key#23 ->&net->rules_mod_lock ->(&mrt->ipmr_expire_timer) ->reg_indoor_lock ->rcu_state.exp_mutex.wait_lock ->&pmc->lock ->pcpu_alloc_mutex.wait_lock ->gdp_mutex.wait_lock ->uevent_sock_mutex.wait_lock ->wq_pool_mutex.wait_lock ->sk_lock-AF_INET ->slock-AF_INET#2 ->&lock->wait_lock ->__ip_vs_mutex.wait_lock ->&x->wait#10 ->deferred_probe_mutex.wait_lock ->team->team_lock_key#72 ->_xmit_PHONET_PIPE ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->team->team_lock_key#76 ->team->team_lock_key#82 ->dev_pm_qos_sysfs_mtx.wait_lock ->team->team_lock_key#83 ->team->team_lock_key#84 FD: 39 BD: 189 +.+.: lock ->kernfs_idr_lock ->cgroup_idr_lock ->pidmap_lock ->drm_minor_lock ->&file_private->table_lock ->&q->queue_lock ->&group->inotify_data.idr_lock FD: 13 BD: 200 +.+.: kernfs_idr_lock ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->&n->list_lock ->&____s->seqcount#2 FD: 82 BD: 191 ++++: &root->kernfs_rwsem ->&root->kernfs_iattr_rwsem ->kernfs_idr_lock ->&obj_hash[i].lock ->pool_lock#2 ->&____s->seqcount ->&rq->__lock ->&cfs_rq->removed.lock ->&sem->wait_lock ->rcu_node_0 ->quarantine_lock ->inode_hash_lock ->fs_reclaim ->&c->lock ->mmu_notifier_invalidate_range_start ->tk_core.seq.seqcount ->&sb->s_type->i_lock_key#24 ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->kernfs_rename_lock ->&n->list_lock ->&p->pi_lock ->&sb->s_type->i_lock_key#30 ->&sb->s_type->i_lock_key#31 ->&xa->xa_lock#5 ->stock_lock ->&____s->seqcount#2 ->key ->pcpu_lock ->percpu_counters_lock ->&meta->lock ->&base->lock ->&rcu_state.expedited_wq ->pool_lock FD: 1 BD: 4 ++++: file_systems_lock FD: 51 BD: 195 ++++: &root->kernfs_iattr_rwsem ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->iattr_mutex ->&sem->wait_lock ->tk_core.seq.seqcount ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 1 +.+.: dq_list_lock FD: 5 BD: 45 +.+.: sb_lock ->unnamed_dev_ida.xa_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 86 BD: 1 +.+.: &type->s_umount_key/1 ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->percpu_counters_lock ->crngs.lock ->&sbinfo->stat_lock ->&sb->s_type->i_lock_key ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&dentry->d_lock ->fs_reclaim ->mmu_notifier_invalidate_range_start FD: 1 BD: 39 +.+.: list_lrus_mutex FD: 1 BD: 46 ....: unnamed_dev_ida.xa_lock FD: 1 BD: 11 +.+.: &sbinfo->stat_lock FD: 47 BD: 135 +.+.: &sb->s_type->i_lock_key ->&dentry->d_lock ->&xa->xa_lock#9 
->bit_wait_table + i FD: 1 BD: 309 +.+.: &s->s_inode_list_lock FD: 36 BD: 386 +.+.: &dentry->d_lock ->&wq ->&dentry->d_lock/1 ->&obj_hash[i].lock ->pool_lock#2 ->&wq#2 ->&lru->node[i].lock ->sysctl_lock ->&dentry->d_lock/2 ->&p->pi_lock FD: 2 BD: 27 ....: mnt_id_ida.xa_lock ->pool_lock#2 FD: 40 BD: 134 +.+.: mount_lock ->mount_lock.seqcount ->&dentry->d_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 38 BD: 134 +.+.: mount_lock.seqcount ->&new_ns->poll ->&dentry->d_lock ->&obj_hash[i].lock ->pool_lock#2 ->&p->pi_lock FD: 75 BD: 1 +.+.: &type->s_umount_key#2/1 ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->&c->lock ->&____s->seqcount ->list_lrus_mutex ->sb_lock ->&sb->s_type->i_lock_key#2 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 37 BD: 131 +.+.: &sb->s_type->i_lock_key#2 ->&dentry->d_lock FD: 1 BD: 4 ..-.: ucounts_lock FD: 38 BD: 148 +.+.: init_fs.lock ->init_fs.seq.seqcount ->&dentry->d_lock FD: 1 BD: 142 +.+.: init_fs.seq.seqcount FD: 2 BD: 92 -.-.: jiffies_lock ->jiffies_seq.seqcount FD: 26 BD: 1 -.-.: log_wait.lock ->&p->pi_lock FD: 83 BD: 1 +.+.: &type->s_umount_key#3/1 ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&____s->seqcount ->&c->lock ->&sb->s_type->i_lock_key#3 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 46 BD: 267 +.+.: &sb->s_type->i_lock_key#3 ->&dentry->d_lock ->&xa->xa_lock#9 FD: 1 BD: 113 ++++: proc_subdir_lock FD: 75 BD: 1 +.+.: &type->s_umount_key#4/1 ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&sb->s_type->i_lock_key#4 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 37 BD: 2 +.+.: &sb->s_type->i_lock_key#4 ->&dentry->d_lock FD: 31 BD: 93 ....: cgroup_file_kn_lock ->kernfs_notify_lock FD: 33 BD: 92 ..-.: css_set_lock ->cgroup_file_kn_lock ->&p->pi_lock ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock FD: 2 BD: 190 +...: cgroup_idr_lock ->pool_lock#2 FD: 50 BD: 91 +.+.: cpuset_mutex ->callback_lock ->jump_label_mutex ->&p->pi_lock ->&p->alloc_lock ->cpuset_attach_wq.lock FD: 1 BD: 92 ....: callback_lock FD: 58 BD: 16 +.+.: blkcg_pol_mutex ->pcpu_alloc_mutex ->fs_reclaim ->pool_lock#2 FD: 1 BD: 16 +.+.: devcgroup_mutex FD: 35 BD: 91 +.+.: freezer_mutex ->freezer_lock ->rcu_node_0 ->&rq->__lock ->freezer_mutex.wait_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->&cfs_rq->removed.lock FD: 44 BD: 49 +.+.: rcu_state.exp_mutex ->rcu_node_0 ->rcu_state.exp_wake_mutex ->&rcu_state.expedited_wq ->&obj_hash[i].lock ->&rnp->exp_wq[3] ->&rq->__lock ->&rnp->exp_wq[1] ->&rnp->exp_wq[2] ->&cfs_rq->removed.lock ->pool_lock#2 ->&rnp->exp_wq[0] ->rcu_state.exp_mutex.wait_lock ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock FD: 37 BD: 95 +.+.: rcu_state.exp_wake_mutex ->rcu_node_0 ->&rnp->exp_lock ->&rnp->exp_wq[0] ->&rnp->exp_wq[1] ->&rnp->exp_wq[2] ->&rnp->exp_wq[3] ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_state.exp_wake_mutex.wait_lock FD: 1 BD: 96 +.+.: &rnp->exp_lock FD: 26 BD: 97 ....: &rnp->exp_wq[0] ->&p->pi_lock FD: 26 BD: 98 ....: &rnp->exp_wq[1] ->&p->pi_lock FD: 1 BD: 96 ....: init_sighand.siglock FD: 1 BD: 3 +.+.: init_files.file_lock FD: 13 BD: 200 ....: pidmap_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->&____s->seqcount#2 FD: 120 BD: 90 ++++: cgroup_threadgroup_rwsem ->css_set_lock ->&p->pi_lock ->tk_core.seq.seqcount ->tasklist_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->&c->lock 
->&____s->seqcount ->rcu_node_0 ->key ->pcpu_lock ->percpu_counters_lock ->&sighand->siglock ->cgroup_threadgroup_rwsem.rss.gp_wait.lock ->&x->wait#3 ->inode_hash_lock ->fs_reclaim ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#30 ->&root->kernfs_iattr_rwsem ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->cpuset_mutex ->freezer_mutex ->&p->alloc_lock ->&____s->seqcount#2 ->cgroup_threadgroup_rwsem.waiters.lock ->freezer_mutex.wait_lock ->&rcu_state.expedited_wq ->&n->list_lock ->stock_lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 25 BD: 937 -.-.: &p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock FD: 61 BD: 95 .+.+: tasklist_lock ->init_sighand.siglock ->&sighand->siglock ->&pid->wait_pidfd ->&obj_hash[i].lock ->quarantine_lock ->&base->lock ->stock_lock ->&p->alloc_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 1 BD: 3034 -.-.: &per_cpu_ptr(group->pcpu, cpu)->seq FD: 1 BD: 1 ....: (kthreadd_done).wait.lock FD: 38 BD: 100 ....: &sighand->siglock ->&sig->wait_chldexit ->input_pool.lock ->&(&sig->stats_lock)->lock ->&p->pi_lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->hrtimer_bases.lock ->&obj_hash[i].lock ->&sighand->signalfd_wqh ->&tty->ctrl.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&rq->__lock ->stock_lock ->&n->list_lock ->&____s->seqcount#2 ->&base->lock ->quarantine_lock FD: 45 BD: 146 +.+.: &p->alloc_lock ->&____s->seqcount#2 ->init_fs.lock ->&fs->lock ->&x->wait ->cpu_asid_lock ->&x->wait#25 ->&newf->file_lock ->&p->pi_lock FD: 1 BD: 3120 .-.-: &____s->seqcount#2 FD: 48 BD: 535 +.+.: fs_reclaim ->mmu_notifier_invalidate_range_start ->&mapping->i_mmap_rwsem ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->icc_bw_lock ->rcu_node_0 ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->&rcu_state.expedited_wq ->&____s->seqcount FD: 33 BD: 557 +.+.: mmu_notifier_invalidate_range_start ->dma_fence_map ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 106 +.+.: kthread_create_lock FD: 26 BD: 165 ....: &x->wait ->&p->pi_lock FD: 36 BD: 1 +.+.: sched_map-wait-type-override ->&pool->lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 26 BD: 529 ....: (worker)->lock ->&p->pi_lock FD: 32 BD: 94 +.+.: wq_pool_attach_mutex ->&p->pi_lock ->&x->wait#7 ->&pool->lock ->&rq->__lock ->wq_pool_attach_mutex.wait_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 529 ..-.: wq_mayday_lock FD: 1 BD: 93 ....: &xa->xa_lock FD: 30 BD: 1 +.-.: (&pool->mayday_timer) ->&pool->lock ->&obj_hash[i].lock ->&base->lock FD: 52 BD: 1 +.+.: (wq_completion)rcu_gp ->(work_completion)(&rnp->exp_poll_wq) ->(work_completion)(&(&ssp->srcu_sup->work)->work) ->(work_completion)(&sdp->work) ->(work_completion)(&rew->rew_work) ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 31 BD: 2 +.+.: (work_completion)(&rnp->exp_poll_wq) ->&rnp->exp_poll_lock FD: 4 BD: 1 -.-.: (null) ->tk_core.seq.seqcount FD: 13 BD: 1 +.-.: (&wq_watchdog_timer) ->&obj_hash[i].lock ->&base->lock FD: 500 BD: 1 +.+.: (wq_completion)events_unbound ->(work_completion)(&(&kfence_timer)->work) ->(work_completion)(&entry->work) ->(next_reseed).work ->(stats_flush_dwork).work ->(work_completion)(&sub_info->work) ->deferred_probe_work ->connector_reaper_work ->(reaper_work).work 
->(work_completion)(&barr->work) ->(work_completion)(&rdev->wiphy_work) ->(work_completion)(&port->bc_work) ->&rq->__lock ->(work_completion)(&pool->idle_cull_work) ->rcu_node_0 ->&rcu_state.expedited_wq FD: 227 BD: 2 +.+.: (work_completion)(&(&kfence_timer)->work) ->cpu_hotplug_lock ->allocation_wait.lock ->&rq->__lock ->&obj_hash[i].lock ->&base->lock ->&cfs_rq->removed.lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 26 BD: 3 -.-.: allocation_wait.lock ->&p->pi_lock FD: 3 BD: 3153 ..-.: batched_entropy_u8.lock ->crngs.lock FD: 1 BD: 3152 ..-.: kfence_freelist_lock FD: 1 BD: 586 ..-.: &meta->lock FD: 48 BD: 1 +.+.: rcu_tasks.tasks_gp_mutex ->&obj_hash[i].lock ->&base->lock ->tasks_rcu_exit_srcu_srcu_usage.lock ->&ACCESS_PRIVATE(sdp, lock) ->tasks_rcu_exit_srcu ->&x->wait#2 ->&rq->__lock ->kernel/rcu/tasks.h:152 ->(&timer.timer) ->rcu_tasks__percpu.cbs_pcpu_lock ->&x->wait#3 ->(console_sem).lock FD: 1 BD: 1 ....: rcu_tasks.cbs_gbl_lock FD: 13 BD: 3 ..-.: rcu_tasks__percpu.cbs_pcpu_lock ->&obj_hash[i].lock ->&base->lock FD: 26 BD: 97 ....: &rnp->exp_wq[2] ->&p->pi_lock FD: 30 BD: 6 ....: tasks_rcu_exit_srcu_srcu_usage.lock ->&obj_hash[i].lock FD: 1 BD: 17 ....: &ACCESS_PRIVATE(sdp, lock) FD: 1 BD: 2 ....: tasks_rcu_exit_srcu FD: 26 BD: 17 ....: &x->wait#2 ->&p->pi_lock FD: 226 BD: 1 +.+.: rcu_tasks_trace.tasks_gp_mutex ->cpu_hotplug_lock ->rcu_tasks_trace__percpu.cbs_pcpu_lock ->&x->wait#3 ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->(&timer.timer) ->(console_sem).lock FD: 1 BD: 1 ....: rcu_tasks_trace.cbs_gbl_lock FD: 35 BD: 2 +.+.: (work_completion)(&(&ssp->srcu_sup->work)->work) ->&ssp->srcu_sup->srcu_gp_mutex ->&ssp->srcu_sup->srcu_cb_mutex ->tasks_rcu_exit_srcu_srcu_usage.lock ->remove_cache_srcu_srcu_usage.lock ->&rq->__lock ->&obj_hash[i].lock ->&base->lock ->&ACCESS_PRIVATE(ssp->srcu_sup, lock) ->&cfs_rq->removed.lock ->pool_lock#2 FD: 34 BD: 3 +.+.: &ssp->srcu_sup->srcu_gp_mutex ->tasks_rcu_exit_srcu_srcu_usage.lock ->&ssp->srcu_sup->srcu_cb_mutex ->remove_cache_srcu_srcu_usage.lock ->&ACCESS_PRIVATE(ssp->srcu_sup, lock) ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 33 BD: 4 +.+.: &ssp->srcu_sup->srcu_cb_mutex ->tasks_rcu_exit_srcu_srcu_usage.lock ->remove_cache_srcu_srcu_usage.lock ->&ACCESS_PRIVATE(ssp->srcu_sup, lock) ->&obj_hash[i].lock ->&rq->__lock ->&base->lock FD: 29 BD: 2 +.+.: (work_completion)(&sdp->work) ->&ACCESS_PRIVATE(sdp, lock) ->&obj_hash[i].lock ->&x->wait#2 ->&rq->__lock ->rcu_node_0 ->&cfs_rq->removed.lock ->pool_lock#2 FD: 26 BD: 127 ..-.: &x->wait#3 ->&p->pi_lock FD: 1 BD: 2 ....: kernel/rcu/tasks.h:152 FD: 26 BD: 184 +.-.: (&timer.timer) ->&p->pi_lock FD: 30 BD: 1 ..-.: &(&kfence_timer)->timer FD: 26 BD: 97 ....: &rnp->exp_wq[3] ->&p->pi_lock FD: 13 BD: 90 ..-.: rcu_tasks_trace__percpu.cbs_pcpu_lock ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 89 ....: &ACCESS_PRIVATE(rtpcp, lock) FD: 1 BD: 1 +.+.: (memory_chain).rwsem FD: 51 BD: 91 +.+.: smpboot_threads_lock ->fs_reclaim ->pool_lock#2 ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&obj_hash[i].lock ->hrtimer_bases.lock FD: 26 BD: 443 ..-.: &rcu_state.gp_wq ->&p->pi_lock FD: 25 BD: 91 -.-.: &stop_pi_lock ->&rq->__lock FD: 1 BD: 91 -.-.: &stopper->lock FD: 1 BD: 2 +.+.: (module_notify_list).rwsem FD: 1 BD: 1 +.+.: ddebug_lock FD: 2 BD: 1 +.+.: cci_probing ->devtree_lock FD: 1 BD: 1 +.+.: ptlock_ptr(ptdesc) FD: 226 BD: 1 +.+.: watchdog_mutex ->cpu_hotplug_lock FD: 26 BD: 90 ....: &x->wait#4 ->&p->pi_lock FD: 847 BD: 1 +.+.: (wq_completion)events 
->(work_completion)(&sscs.work) ->rdist_memreserve_cpuhp_cleanup_work ->(shepherd).work ->(work_completion)(&(&group->avgs_work)->work) ->(work_completion)(&rfkill_global_led_trigger_work) ->timer_update_work ->pcpu_balance_work ->(work_completion)(&p->wq) ->(debug_obj_work).work ->(work_completion)(&helper->damage_work) ->(work_completion)(&rfkill->sync_work) ->(linkwatch_work).work ->(work_completion)(&w->work) ->(work_completion)(&gadget->work) ->kernfs_notify_work ->async_lookup_work ->autoload_work ->(work_completion)(&barr->work) ->drain_vmap_work ->netstamp_work ->reg_work ->(work_completion)(&fw_work->work) ->(work_completion)(&s->destroy_work) ->(work_completion)(&(&krcp->monitor_work)->work) ->(work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) ->(work_completion)(&(&ovs_net->masks_rebalance)->work) ->(work_completion)(&ht->run_work) ->(work_completion)(&aux->work) ->(work_completion)(&w->work)#2 ->(deferred_probe_timeout_work).work ->(work_completion)(&sbi->s_sb_upd_work) ->(regulator_init_complete_work).work ->(work_completion)(&cgrp->bpf.release_work) ->(work_completion)(&w->w) ->deferred_process_work ->(work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) ->(work_completion)(&data->fib_event_work) ->(work_completion)(&(&hwstats->traffic_dw)->work) ->(work_completion)(&rdev->wiphy_work) ->wireless_nlevent_work ->(work_completion)(&(&conn->info_timer)->work) ->free_ipc_work ->(work_completion)(&(&devlink->rwork)->work) ->fqdir_free_work ->(work_completion)(&blkg->free_work) ->(work_completion)(&tty->hangup_work)#2 ->(ima_keys_delayed_work).work ->&rq->__lock FD: 28 BD: 2 +.+.: (work_completion)(&sscs.work) ->&x->wait#5 ->&obj_hash[i].lock ->hrtimer_bases.lock ->&x->wait#4 FD: 1 BD: 92 -.-.: &x->wait#5 FD: 2 BD: 147 +.+.: &newf->file_lock ->&newf->resize_wait FD: 1 BD: 1 ....: &p->vtime.seqcount FD: 39 BD: 89 +.+.: mem_hotplug_lock ->mem_hotplug_lock.rss.gp_wait.lock FD: 3 BD: 90 ..-.: mem_hotplug_lock.rss.gp_wait.lock ->&obj_hash[i].lock FD: 1 BD: 89 ....: mem_hotplug_lock.waiters.lock FD: 227 BD: 3 +.+.: cpu_add_remove_lock ->cpu_hotplug_lock ->cpu_hotplug_lock.waiters.lock ->cpu_hotplug_lock.rss.gp_wait.lock ->cpuset_hotplug_work FD: 3 BD: 89 ..-.: cpu_hotplug_lock.rss.gp_wait.lock ->&obj_hash[i].lock FD: 26 BD: 89 ....: cpu_hotplug_lock.waiters.lock ->&p->pi_lock FD: 1 BD: 4 +.+.: cpuset_hotplug_work FD: 1 BD: 1 ....: rcu_callback FD: 1 BD: 90 +.+.: pcp_batch_high_lock FD: 26 BD: 96 ....: (cpu_running).wait.lock ->&p->pi_lock FD: 26 BD: 96 ....: &x->wait#6 ->&p->pi_lock FD: 1 BD: 3034 -.-.: &cfs_rq->removed.lock FD: 1 BD: 95 ....: &x->wait#7 FD: 16 BD: 3034 -...: &rt_b->rt_runtime_lock ->&rt_rq->rt_runtime_lock ->tk_core.seq.seqcount ->hrtimer_bases.lock FD: 1 BD: 3035 -...: &rt_rq->rt_runtime_lock FD: 29 BD: 89 +.+.: stop_cpus_mutex ->&stopper->lock ->&stop_pi_lock ->&rq->__lock ->&x->wait#8 FD: 26 BD: 91 ....: &x->wait#8 ->&p->pi_lock FD: 226 BD: 2 +.+.: rdist_memreserve_cpuhp_cleanup_work ->cpu_hotplug_lock FD: 58 BD: 1 +.+.: sched_domains_mutex ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->pcpu_alloc_mutex ->&c->lock ->&____s->seqcount ->pcpu_lock FD: 1 BD: 3034 ....: &cp->lock FD: 77 BD: 1 +.+.: &type->s_umount_key#5/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&obj_hash[i].lock ->percpu_counters_lock ->crngs.lock ->&sbinfo->stat_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#5 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&dentry->d_lock FD: 37 BD: 133 
+.+.: &sb->s_type->i_lock_key#5 ->&dentry->d_lock FD: 26 BD: 1 ....: (setup_done).wait.lock ->&p->pi_lock FD: 75 BD: 24 ++++: namespace_sem ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 ->mnt_id_ida.xa_lock ->pcpu_alloc_mutex ->&dentry->d_lock ->mount_lock ->rename_lock ->&obj_hash[i].lock ->&n->list_lock ->&____s->seqcount#2 ->&rq->__lock FD: 1 BD: 141 +.+.: &____s->seqcount#3 FD: 61 BD: 1 +.+.: &type->s_umount_key#6 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->&____s->seqcount ->&c->lock ->&lru->node[i].lock ->&sbinfo->stat_lock ->&obj_hash[i].lock FD: 26 BD: 388 +.+.: &lru->node[i].lock FD: 91 BD: 7 ++++: &sb->s_type->i_mutex_key ->namespace_sem ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->rename_lock.seqcount ->tomoyo_ss ->tk_core.seq.seqcount ->&sb->s_type->i_lock_key#2 ->&wb->list_lock ->&c->lock ->&____s->seqcount ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 38 BD: 22 +.+.: rename_lock ->rename_lock.seqcount FD: 37 BD: 150 +.+.: rename_lock.seqcount ->&dentry->d_lock ->&dentry->d_lock/2 FD: 1 BD: 135 ....: &new_ns->poll FD: 38 BD: 142 +.+.: &fs->lock ->&____s->seqcount#3 ->&dentry->d_lock FD: 1 BD: 85 +.+.: req_lock FD: 93 BD: 1 +.+.: of_mutex ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem FD: 1 BD: 154 ....: &x->wait#9 FD: 1 BD: 182 +.+.: &k->list_lock FD: 25 BD: 159 ++++: bus_type_sem ->&rq->__lock FD: 32 BD: 216 -...: &dev->power.lock ->&dev->power.wait_queue ->hrtimer_bases.lock ->&dev->power.lock/1 FD: 25 BD: 157 +.+.: dpm_list_mtx ->&rq->__lock FD: 57 BD: 162 +.+.: uevent_sock_mutex ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 ->nl_table_lock ->&obj_hash[i].lock ->nl_table_wait.lock ->&rq->__lock ->&cfs_rq->removed.lock ->&base->lock ->rlock-AF_NETLINK ->&n->list_lock ->remove_cache_srcu ->rcu_node_0 ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->uevent_sock_mutex.wait_lock ->&meta->lock ->&____s->seqcount#2 ->mmu_notifier_invalidate_range_start ->&rcu_state.expedited_wq ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock FD: 1 BD: 136 ....: running_helpers_waitq.lock FD: 1 BD: 171 +.+.: sysfs_symlink_target_lock FD: 2 BD: 224 +.+.: &k->k_lock ->klist_remove_lock FD: 1 BD: 1 ....: &dev->mutex FD: 1 BD: 1 +.+.: subsys mutex FD: 2 BD: 1 +.+.: memory_blocks.xa_lock ->pool_lock#2 FD: 1 BD: 1 +.+.: subsys mutex#2 FD: 1 BD: 1 +.+.: subsys mutex#3 FD: 57 BD: 91 +.+.: dev_pm_qos_mtx ->fs_reclaim ->pool_lock#2 ->&dev->power.lock ->pm_qos_lock ->&c->lock ->&____s->seqcount ->&rq->__lock FD: 1 BD: 92 ....: pm_qos_lock FD: 103 BD: 89 +.+.: dev_pm_qos_sysfs_mtx ->dev_pm_qos_mtx ->&root->kernfs_rwsem ->fs_reclaim ->pool_lock#2 ->lock ->&____s->seqcount ->&c->lock ->&rq->__lock ->&sem->wait_lock ->&p->pi_lock ->dev_pm_qos_sysfs_mtx.wait_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock FD: 51 BD: 15 +.+.: register_lock ->proc_subdir_lock ->fs_reclaim ->pool_lock#2 ->proc_inum_ida.xa_lock ->&c->lock ->&____s->seqcount FD: 26 BD: 569 -.-.: &rcu_state.expedited_wq ->&p->pi_lock FD: 1 BD: 1 +.+.: (cpufreq_policy_notifier_list).rwsem FD: 25 BD: 2 +.+.: (pm_chain_head).rwsem ->&rq->__lock FD: 1 BD: 1 +.+.: cpufreq_governor_mutex FD: 40 BD: 2 +.+.: (work_completion)(&rew->rew_work) ->rcu_node_0 ->rcu_state.exp_wake_mutex ->&rcu_state.expedited_wq ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->(&timer.timer) ->pool_lock#2 ->pool_lock ->&cfs_rq->removed.lock ->rcu_state.exp_wake_mutex.wait_lock ->&p->pi_lock FD: 1 BD: 2 ++++: binfmt_lock FD: 1 BD: 72 +.+.: 
pin_fs_lock FD: 75 BD: 1 +.+.: &type->s_umount_key#7/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->&____s->seqcount ->&c->lock ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#6 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 37 BD: 3 +.+.: &sb->s_type->i_lock_key#6 ->&dentry->d_lock FD: 63 BD: 1 +.+.: &sb->s_type->i_mutex_key#2 ->&sb->s_type->i_lock_key#6 ->rename_lock.seqcount ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->mmu_notifier_invalidate_range_start ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&c->lock ->&____s->seqcount FD: 26 BD: 389 ....: &wq ->&p->pi_lock FD: 75 BD: 1 +.+.: &type->s_umount_key#8/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#7 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 37 BD: 70 +.+.: &sb->s_type->i_lock_key#7 ->&dentry->d_lock ->&p->pi_lock FD: 88 BD: 68 +.+.: &sb->s_type->i_mutex_key#3 ->&sb->s_type->i_lock_key#7 ->rename_lock.seqcount ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->mmu_notifier_invalidate_range_start ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&c->lock ->&____s->seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock ->&rq->__lock ->(console_sem).lock ->&n->list_lock ->&obj_hash[i].lock ->rcu_node_0 ->pin_fs_lock ->mount_lock ->&fsnotify_mark_srcu ->&xa->xa_lock#9 ->&____s->seqcount#2 ->remove_cache_srcu ->&rcu_state.gp_wq ->&cfs_rq->removed.lock ->&xa->xa_lock#5 ->stock_lock ->&rcu_state.expedited_wq ->key ->pcpu_lock ->percpu_counters_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 1 BD: 22 +.+.: chrdevs_lock FD: 724 BD: 2 ++++: cb_lock ->genl_mutex ->fs_reclaim ->pool_lock#2 ->rlock-AF_NETLINK ->rtnl_mutex ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->&____s->seqcount ->&rdev->wiphy.mtx ->nlk_cb_mutex-GENERIC ->quarantine_lock ->remove_cache_srcu ->&xa->xa_lock#16 ->genl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->rtnl_mutex.wait_lock ->&____s->seqcount#2 ->rcu_node_0 ->&lock->wait_lock ->&cfs_rq->removed.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&rcu_state.expedited_wq ->(console_sem).lock ->console_owner_lock ->console_owner FD: 711 BD: 3 +.+.: genl_mutex ->fs_reclaim ->pool_lock#2 ->nl_table_lock ->nl_table_wait.lock ->&c->lock ->&n->list_lock ->rlock-AF_NETLINK ->&obj_hash[i].lock ->&____s->seqcount ->&zone->lock ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->genl_mutex.wait_lock ->hwsim_radio_lock ->&x->wait#9 ->batched_entropy_u32.lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->subsys mutex#53 ->device_links_lock ->&k->k_lock ->deferred_probe_mutex ->pcpu_alloc_mutex ->cpu_hotplug_lock ->wq_pool_mutex ->crngs.lock ->triggers_list_lock ->leds_list_lock ->rfkill_global_mutex ->rfkill_global_mutex.wait_lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->(inetaddr_chain).rwsem ->inet6addr_chain.lock ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->&sem->wait_lock ->rcu_node_0 ->quarantine_lock ->uevent_sock_mutex.wait_lock ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&rcu_state.expedited_wq ->(console_sem).lock ->console_owner_lock ->console_owner ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&base->lock FD: 1 BD: 4 +.+.: subsys mutex#4 FD: 4 BD: 5 ....: 
async_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 356 BD: 2 +.+.: (work_completion)(&entry->work) ->tk_core.seq.seqcount ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&k->list_lock ->&____s->seqcount ->lock ->&root->kernfs_rwsem ->&c->lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->async_lock ->async_done.lock ->&dentry->d_lock ->&sb->s_type->i_mutex_key ->sb_writers#2 ->&sb->s_type->i_lock_key#2 ->quarantine_lock FD: 1 BD: 3 +.+.: regulator_list_mutex FD: 1 BD: 17 .+.+: device_links_srcu FD: 3 BD: 16 +.+.: fwnode_link_lock ->&k->k_lock FD: 28 BD: 91 +.+.: device_links_lock ->&k->list_lock ->&k->k_lock ->&rq->__lock FD: 1 BD: 18 ....: &dev->devres_lock FD: 4 BD: 16 +.+.: pinctrl_list_mutex ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 16 +.+.: pinctrl_maps_mutex FD: 1 BD: 4 +.+.: regulator_nesting_mutex FD: 2 BD: 1 +.+.: regulator_ww_class_mutex ->regulator_nesting_mutex FD: 97 BD: 91 +.+.: gdp_mutex ->&k->list_lock ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&c->lock ->&____s->seqcount ->kobj_ns_type_lock ->&____s->seqcount#2 ->&rq->__lock ->sysfs_symlink_target_lock ->kernfs_idr_lock ->&obj_hash[i].lock ->&sem->wait_lock ->&p->pi_lock ->gdp_mutex.wait_lock ->&cfs_rq->removed.lock ->&n->list_lock ->remove_cache_srcu FD: 1 BD: 1 +.+.: (reboot_notifier_list).rwsem FD: 3 BD: 3 +.+.: subsys mutex#5 ->&k->k_lock FD: 27 BD: 92 +.+.: deferred_probe_mutex ->&rq->__lock ->deferred_probe_mutex.wait_lock ->rcu_node_0 FD: 1 BD: 16 ....: probe_waitqueue.lock FD: 26 BD: 3 ....: async_done.lock ->&p->pi_lock FD: 76 BD: 1 +.+.: &type->s_umount_key#9/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&____s->seqcount ->&c->lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#8 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 38 BD: 51 +.+.: &sb->s_type->i_lock_key#8 ->&dentry->d_lock ->&p->pi_lock ->bit_wait_table + i FD: 34 BD: 7 +.+.: vmap_purge_lock ->purge_vmap_area_lock ->free_vmap_area_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->vmap_purge_lock.wait_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 4 +.+.: &fp->aux->used_maps_mutex FD: 1 BD: 1 +.+.: proto_list_mutex FD: 1 BD: 1 +.+.: targets_mutex FD: 27 BD: 275 ...-: nl_table_lock ->pool_lock#2 ->&obj_hash[i].lock ->nl_table_wait.lock FD: 26 BD: 276 ..-.: nl_table_wait.lock ->&p->pi_lock FD: 1 BD: 1 +.+.: net_family_lock FD: 2 BD: 5 ....: net_generic_ids.xa_lock ->pool_lock#2 FD: 4 BD: 54 ..-.: &dir->lock ->&obj_hash[i].lock ->pool_lock#2 FD: 34 BD: 5 +.+.: k-sk_lock-AF_NETLINK ->k-slock-AF_NETLINK ->&rq->__lock FD: 1 BD: 6 +...: k-slock-AF_NETLINK FD: 2 BD: 101 ..-.: rhashtable_bucket ->rhashtable_bucket/1 FD: 1 BD: 49 ....: &cma->lock FD: 41 BD: 49 +.+.: cma_mutex ->&zone->lock ->&obj_hash[i].lock ->lock#2 FD: 27 BD: 1 +.+.: pcpu_drain_mutex ->&pcp->lock FD: 34 BD: 51 +.+.: lock#2 ->&obj_hash[i].lock ->&rq->__lock ->(work_completion)(work) FD: 1 BD: 1 +.+.: &pool->lock#2 FD: 1 BD: 92 ....: freezer_lock FD: 1 BD: 1 ....: audit_backlog_wait.lock FD: 1 BD: 1 ....: &list->lock FD: 26 BD: 1 ....: kauditd_wait.lock ->&p->pi_lock FD: 1 BD: 1 ....: printk_ratelimit_state.lock FD: 3 BD: 2 +.+.: lock#3 ->&zone->lock FD: 53 BD: 1 +.+.: khugepaged_mutex ->fs_reclaim ->pool_lock#2 ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&obj_hash[i].lock ->lock#3 ->pcp_batch_high_lock FD: 127 BD: 7 ++++: &(&priv->bus_notifier)->rwsem ->&device->physical_node_lock ->iommu_probe_device_lock ->fs_reclaim 
->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&rq->__lock ->&c->lock ->&____s->seqcount ->i2c_dev_list_lock ->&x->wait#9 ->&obj_hash[i].lock ->chrdevs_lock ->&k->list_lock ->gdp_mutex ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#61 FD: 3 BD: 6 +.+.: subsys mutex#6 ->&k->k_lock FD: 4 BD: 1 +.+.: subsys mutex#7 ->&k->list_lock ->&k->k_lock FD: 1 BD: 1 +.+.: regmap_debugfs_early_lock FD: 1 BD: 1 +.+.: (acpi_reconfig_chain).rwsem FD: 1 BD: 1 +.+.: __i2c_board_lock FD: 1 BD: 590 -.-.: quarantine_lock FD: 52 BD: 1 +.+.: core_lock ->&k->list_lock ->&k->k_lock ->fs_reclaim ->pool_lock#2 FD: 35 BD: 398 .+.+: remove_cache_srcu ->quarantine_lock ->&c->lock ->&n->list_lock ->&obj_hash[i].lock ->&rq->__lock ->pool_lock#2 ->rcu_node_0 ->&____s->seqcount ->&cfs_rq->removed.lock ->key ->pcpu_lock ->percpu_counters_lock ->&rcu_state.expedited_wq ->&base->lock ->stock_lock ->&meta->lock ->kfence_freelist_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 2 BD: 1 +.+.: thermal_governor_lock ->thermal_list_lock FD: 1 BD: 2 +.+.: thermal_list_lock FD: 32 BD: 1 +.+.: cpuidle_lock ->&obj_hash[i].lock ->(console_sem).lock FD: 53 BD: 1 +.+.: k-sk_lock-AF_QIPCRTR ->k-slock-AF_QIPCRTR ->fs_reclaim ->qrtr_ports.xa_lock ->pool_lock#2 ->qrtr_node_lock ->&obj_hash[i].lock FD: 1 BD: 2 +...: k-slock-AF_QIPCRTR FD: 1 BD: 2 +.+.: qrtr_ports.xa_lock FD: 26 BD: 3 +.+.: qrtr_node_lock ->qrtr_nodes_lock ->&rq->__lock FD: 51 BD: 96 ++++: (crypto_chain).rwsem ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&obj_hash[i].lock FD: 306 BD: 2 +.+.: tty_mutex ->(console_sem).lock ->console_lock ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->tty_ldiscs_lock ->&obj_hash[i].lock ->&k->list_lock ->&k->k_lock ->&tty->legacy_mutex ->stock_lock ->&xa->xa_lock#21 ->(work_completion)(&buf->work) ->&rq->__lock ->tty_mutex.wait_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&n->list_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->devpts_mutex FD: 227 BD: 1 +.+.: iova_cache_mutex ->cpu_hotplug_lock ->slab_mutex FD: 3 BD: 1 +.+.: subsys mutex#8 ->&k->k_lock FD: 1 BD: 1 ..-.: uidhash_lock FD: 38 BD: 49 +.+.: rcu_state.barrier_mutex ->rcu_state.barrier_lock ->&x->wait#24 ->&rq->__lock ->rcu_state.barrier_mutex.wait_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->rcu_node_0 FD: 52 BD: 1 +.+.: (work_completion)(&eval_map_work) ->trace_event_sem FD: 1 BD: 1 ....: oom_reaper_wait.lock FD: 1 BD: 1 +.+.: subsys mutex#9 FD: 1 BD: 1 ....: &pgdat->kcompactd_wait FD: 1 BD: 1 ....: hugetlb_lock FD: 118 BD: 1 +.+.: memory_tier_lock ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->&c->lock ->&____s->seqcount ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&k->k_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#10 FD: 1 BD: 2 +.+.: subsys mutex#10 FD: 1 BD: 1 +.+.: ksm_thread_mutex FD: 1 BD: 1 ....: ksm_thread_wait.lock FD: 1 BD: 1 +.+.: khugepaged_mm_lock FD: 1 BD: 1 ....: khugepaged_wait.lock FD: 1 BD: 2 +.+.: damon_ops_lock FD: 52 BD: 95 ++++: crypto_alg_sem ->(crypto_chain).rwsem FD: 105 BD: 8 +.+.: bio_slab_lock ->fs_reclaim ->pool_lock#2 ->slab_mutex ->bio_slabs.xa_lock FD: 8 BD: 9 +.+.: bio_slabs.xa_lock ->pool_lock#2 
->&c->lock ->&____s->seqcount FD: 51 BD: 1 +.+.: major_names_lock ->fs_reclaim ->pool_lock#2 ->major_names_spinlock ->&c->lock ->&obj_hash[i].lock ->loop_ctl_mutex ->&____s->seqcount FD: 1 BD: 2 +.+.: major_names_spinlock FD: 15 BD: 1 +.-.: (&rtpcp->lazy_timer) ->rcu_tasks_trace__percpu.cbs_pcpu_lock ->rcu_tasks__percpu.cbs_pcpu_lock FD: 51 BD: 1 +.+.: &pgdat->kswapd_lock ->fs_reclaim ->pool_lock#2 ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 30 BD: 1 ..-.: drivers/char/random.c:251 FD: 14 BD: 2 +.+.: (next_reseed).work ->&obj_hash[i].lock ->&base->lock ->input_pool.lock ->base_crng.lock FD: 30 BD: 1 ..-.: mm/vmstat.c:2022 FD: 226 BD: 2 +.+.: (shepherd).work ->cpu_hotplug_lock ->&obj_hash[i].lock ->&base->lock ->&rq->__lock FD: 32 BD: 1 +.+.: (wq_completion)mm_percpu_wq ->(work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) ->(work_completion)(work) FD: 27 BD: 2 +.+.: (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) ->&obj_hash[i].lock ->&base->lock ->&pcp->lock ->&rq->__lock FD: 3 BD: 6 +.+.: subsys mutex#11 ->&k->k_lock FD: 1 BD: 1 -...: shrink_qlist.lock FD: 30 BD: 5 ....: remove_cache_srcu_srcu_usage.lock ->&obj_hash[i].lock FD: 30 BD: 1 ..-.: &(&ssp->srcu_sup->work)->timer FD: 33 BD: 89 +.+.: flush_lock ->&obj_hash[i].lock ->(work_completion)(&sfw->work) ->&x->wait#10 ->&rq->__lock FD: 9 BD: 91 +.+.: (work_completion)(&sfw->work) ->&c->lock ->&n->list_lock ->&obj_hash[i].lock FD: 29 BD: 90 +.+.: (wq_completion)slub_flushwq ->(work_completion)(&sfw->work) ->(work_completion)(&barr->work) FD: 26 BD: 533 ....: &x->wait#10 ->&p->pi_lock FD: 27 BD: 94 +.+.: (work_completion)(&barr->work) ->&x->wait#10 ->&rq->__lock FD: 1 BD: 1 +.+.: prepare_lock FD: 89 BD: 1 +.+.: clk_debug_lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 FD: 1 BD: 1 +.+.: clocks_mutex FD: 358 BD: 1 +.+.: acpi_scan_lock ->semaphore->lock ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&x->wait#9 ->acpi_device_lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&k->k_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->subsys mutex#12 ->uevent_sock_mutex ->running_helpers_waitq.lock ->*(&acpi_gbl_reference_count_lock) ->&n->list_lock ->acpi_ioremap_lock ->quarantine_lock ->&device->physical_node_lock ->irq_domain_mutex ->&domain->mutex ->resource_lock ->&(&priv->bus_notifier)->rwsem ->fwnode_link_lock ->device_links_srcu ->acpi_pm_notifier_install_lock ->free_vmap_area_lock ->vmap_area_lock ->init_mm.page_table_lock ->subsys mutex#4 ->(console_sem).lock ->io_range_mutex ->pci_bus_sem ->gdp_mutex ->subsys mutex#17 ->acpi_hp_context_lock ->bridge_mutex ->pci_lock ->pci_acpi_companion_lookup_sem ->pci_slot_mutex ->resource_alignment_lock ->iort_msi_chip_lock ->subsys mutex#18 ->devtree_lock ->pci_rescan_remove_lock ->acpi_link_lock ->acpi_dep_list_lock ->power_resource_list_lock FD: 51 BD: 2 +.+.: acpi_device_lock ->fs_reclaim ->pool_lock#2 
->&xa->xa_lock#2 ->semaphore->lock ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount FD: 1 BD: 3 ....: &xa->xa_lock#2 FD: 1 BD: 2 +.+.: subsys mutex#12 FD: 94 BD: 29 +.+.: &device->physical_node_lock ->sysfs_symlink_target_lock ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock FD: 121 BD: 2 +.+.: acpi_pm_notifier_install_lock ->semaphore->lock ->fs_reclaim ->pool_lock#2 ->*(&acpi_gbl_reference_count_lock) ->acpi_pm_notifier_lock ->&c->lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 118 BD: 3 +.+.: acpi_pm_notifier_lock ->fs_reclaim ->pool_lock#2 ->wakeup_ida.xa_lock ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#13 ->events_lock FD: 1 BD: 5 ....: wakeup_ida.xa_lock FD: 3 BD: 5 +.+.: subsys mutex#13 ->&k->k_lock FD: 1 BD: 5 ....: events_lock FD: 1 BD: 2 +.+.: acpi_wakeup_lock FD: 165 BD: 2 +.+.: port_mutex ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&k->k_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#14 ->&xa->xa_lock#3 ->&rq->__lock ->&port->mutex ->&cfs_rq->removed.lock FD: 1 BD: 3 +.+.: subsys mutex#14 FD: 1 BD: 3 ....: &xa->xa_lock#3 FD: 1 BD: 217 ....: &dev->power.wait_queue FD: 73 BD: 1 +.+.: (wq_completion)pm ->(work_completion)(&dev->power.work) FD: 72 BD: 2 +.+.: (work_completion)(&dev->power.work) ->&dev->power.lock ->&hub->irq_urb_lock ->(&hub->irq_urb_retry) ->&obj_hash[i].lock ->&base->lock ->hcd_urb_unlink_lock ->hcd_root_hub_lock ->usb_kill_urb_queue.lock ->&rq->__lock ->(work_completion)(&hub->tt.clear_work) ->&dum_hcd->dum->lock ->device_state_lock ->hcd_urb_list_lock ->&vhci_hcd->vhci->lock ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->fs_reclaim ->&x->wait#19 ->(&timer.timer) ->&c->lock ->&____s->seqcount ->&cfs_rq->removed.lock ->&port_lock_key FD: 156 BD: 11 +.+.: &port->mutex ->fs_reclaim ->pool_lock#2 ->(console_sem).lock ->&port_lock_key ->console_mutex ->ctrl_ida.xa_lock ->&x->wait#9 ->&obj_hash[i].lock ->&____s->seqcount ->&dev->power.lock ->&k->list_lock ->&c->lock ->lock ->&root->kernfs_rwsem ->&device->physical_node_lock ->semaphore->lock ->sysfs_symlink_target_lock ->&k->k_lock ->dpm_list_mtx ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#15 ->*(&acpi_gbl_reference_count_lock) ->&n->list_lock ->dev_pm_qos_sysfs_mtx ->kernfs_idr_lock ->deferred_probe_mutex ->device_links_lock ->mmu_notifier_invalidate_range_start ->gdp_mutex ->bus_type_sem ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->subsys mutex#16 ->chrdevs_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&cfs_rq->removed.lock ->&desc->request_mutex ->register_lock ->&irq_desc_lock_class ->proc_subdir_lock ->proc_inum_ida.xa_lock FD: 35 BD: 154 -...: &port_lock_key ->&dev->power.lock ->&port->lock ->&tty->write_wait FD: 25 BD: 13 +.+.: syslog_lock ->&rq->__lock FD: 37 BD: 150 -...: console_owner ->&port_lock_key ->console_owner_lock FD: 30 BD: 1 ..-.: &(&group->avgs_work)->timer FD: 30 BD: 1 ..-.: &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); 
(typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer FD: 30 BD: 1 ..-.: mm/memcontrol.c:679 FD: 26 BD: 2 +.+.: (work_completion)(&(&group->avgs_work)->work) ->&group->avgs_lock ->&rq->__lock FD: 25 BD: 3 +.+.: &group->avgs_lock ->&per_cpu_ptr(group->pcpu, cpu)->seq ->&obj_hash[i].lock ->&base->lock ->&rq->__lock FD: 26 BD: 2 +.+.: (stats_flush_dwork).work ->cgroup_rstat_lock ->&obj_hash[i].lock ->&base->lock ->&rq->__lock FD: 2 BD: 18 ....: cgroup_rstat_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 1 BD: 3043 ..-.: per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 1 BD: 12 ....: ctrl_ida.xa_lock FD: 1 BD: 12 +.+.: subsys mutex#15 FD: 1 BD: 225 +.+.: klist_remove_lock FD: 26 BD: 85 ....: &x->wait#11 ->&p->pi_lock FD: 131 BD: 1 .+.+: sb_writers ->mount_lock ->&type->i_mutex_dir_key/1 ->&sb->s_type->i_mutex_key#4 ->tk_core.seq.seqcount ->&sb->s_type->i_lock_key#5 ->&wb->list_lock ->&type->i_mutex_dir_key#2 ->&sem->wait_lock ->&p->pi_lock ->&rq->__lock ->&s->s_inode_list_lock ->&obj_hash[i].lock ->&sbinfo->stat_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->&cfs_rq->removed.lock ->pool_lock#2 ->&dentry->d_lock ->tomoyo_ss ->&xattrs->lock FD: 113 BD: 2 +.+.: &type->i_mutex_dir_key/1 ->rename_lock.seqcount ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->&obj_hash[i].lock ->&sbinfo->stat_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#5 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&____s->seqcount ->&c->lock ->&xattrs->lock ->&simple_offset_xa_lock ->smack_known_lock ->&sb->s_type->i_mutex_key#4 ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->&rq->__lock ->tomoyo_ss ->&u->bindlock ->&n->list_lock ->&sem->wait_lock ->remove_cache_srcu ->batched_entropy_u8.lock ->&fsnotify_mark_srcu ->&xa->xa_lock#9 ->&____s->seqcount#2 ->&conn->lock ->&group->mark_mutex ->&mark->lock ->&group->notification_lock ->&group->notification_waitq ->&group->inotify_data.idr_lock ->destroy_lock ->&base->lock FD: 1 BD: 11 ++++: &xattrs->lock FD: 13 BD: 11 +.+.: &simple_offset_xa_lock ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->&n->list_lock ->&____s->seqcount#2 ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 35 BD: 114 +.+.: smack_known_lock ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock ->smack_known_lock.wait_lock ->&cfs_rq->removed.lock ->&meta->lock ->kfence_freelist_lock ->&base->lock ->quarantine_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&____s->seqcount FD: 31 BD: 3 +.+.: &sb->s_type->i_mutex_key#4 ->tk_core.seq.seqcount ->&rq->__lock ->tomoyo_ss ->&xattrs->lock ->&simple_offset_xa_lock ->&dentry->d_lock FD: 3 BD: 12 +.+.: subsys mutex#16 ->&k->k_lock FD: 1 BD: 2 +.+.: io_range_mutex FD: 1 BD: 2 ++++: pci_bus_sem FD: 3 BD: 2 +.+.: subsys mutex#17 ->&k->k_lock FD: 49 BD: 2 +.+.: acpi_hp_context_lock ->fs_reclaim ->pool_lock#2 FD: 1 BD: 2 +.+.: bridge_mutex FD: 1 BD: 13 ....: pci_lock FD: 1 BD: 2 .+.+: pci_acpi_companion_lookup_sem FD: 1 BD: 2 +.+.: pci_slot_mutex FD: 1 BD: 2 +.+.: resource_alignment_lock FD: 1 BD: 217 ....: &dev->power.lock/1 FD: 1 BD: 2 +.+.: subsys mutex#18 FD: 36 BD: 2 +.+.: pci_rescan_remove_lock FD: 64 BD: 4 +.+.: acpi_link_lock ->fs_reclaim ->pool_lock#2 ->semaphore->lock ->&obj_hash[i].lock ->*(&acpi_gbl_reference_count_lock) ->(console_sem).lock ->&c->lock ->&____s->seqcount ->&rq->__lock ->&cfs_rq->removed.lock FD: 1 BD: 2 +.+.: acpi_dep_list_lock FD: 1 BD: 2 +.+.: power_resource_list_lock 
FD: 75 BD: 1 +.+.: &type->s_umount_key#10/1 ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#9 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 37 BD: 2 +.+.: &sb->s_type->i_lock_key#9 ->&dentry->d_lock FD: 75 BD: 1 +.+.: &type->s_umount_key#11/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#10 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&c->lock ->&____s->seqcount ->&dentry->d_lock FD: 37 BD: 2 +.+.: &sb->s_type->i_lock_key#10 ->&dentry->d_lock FD: 164 BD: 95 ++++: &mm->mmap_lock ->reservation_ww_class_acquire ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->&vma->vm_lock->lock ->&mm->page_table_lock ->ptlock_ptr(ptdesc)#2 ->&anon_vma->rwsem ->mmu_notifier_invalidate_range_start ->lock#4 ->lock#5 ->&obj_hash[i].lock ->&mapping->i_mmap_rwsem ->&p->alloc_lock ->&lruvec->lru_lock ->tk_core.seq.seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&rq->__lock ->&mm->mmap_lock/1 ->&sem->wait_lock ->&p->pi_lock ->rcu_node_0 ->remove_cache_srcu ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&n->list_lock ->quarantine_lock ->&base->lock ->&cfs_rq->removed.lock ->&sb->s_type->i_lock_key ->&kcov->lock ->&rcu_state.expedited_wq ->key ->pcpu_lock ->percpu_counters_lock ->&folio_wait_table[i] ->stock_lock ->&____s->seqcount#2 ->&xa->xa_lock#9 ->&info->lock FD: 75 BD: 96 +.+.: reservation_ww_class_acquire ->reservation_ww_class_mutex FD: 74 BD: 97 +.+.: reservation_ww_class_mutex ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&xa->xa_lock#9 ->&sb->s_type->i_lock_key ->&info->lock ->lock#4 ->&rq->__lock ->free_vmap_area_lock ->vmap_area_lock ->init_mm.page_table_lock FD: 46 BD: 537 ++++: &mapping->i_mmap_rwsem ->&obj_hash[i].lock ->pool_lock#2 ->&anon_vma->rwsem ->&rq->__lock ->&sem->wait_lock ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->quarantine_lock ->&meta->lock ->kfence_freelist_lock ->&p->pi_lock FD: 1 BD: 558 +.+.: dma_fence_map FD: 1 BD: 455 ....: key FD: 1 BD: 1 +.+.: attribute_container_mutex FD: 25 BD: 14 +.+.: triggers_list_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 25 BD: 14 .+.+: leds_list_lock ->&rq->__lock FD: 137 BD: 2 ++++: (usb_notifier_list).rwsem ->fs_reclaim ->pool_lock#2 ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#57 ->mon_lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 1 BD: 1 +.+.: rc_map_lock FD: 1 BD: 1 +.+.: subsys mutex#19 FD: 2 BD: 8 +.+.: iommu_probe_device_lock ->iommu_device_lock FD: 1 BD: 9 +.+.: iommu_device_lock FD: 1 BD: 7 ....: (efi_runtime_lock).lock FD: 26 BD: 9 ....: &x->wait#12 ->&p->pi_lock FD: 29 BD: 1 +.+.: (wq_completion)efi_rts_wq ->(work_completion)(&efi_rts_work.work) FD: 28 BD: 2 +.+.: (work_completion)(&efi_rts_work.work) ->cpu_asid_lock ->efi_rt_lock ->&x->wait#12 FD: 1 BD: 3034 ....: cpu_asid_lock FD: 1 BD: 3 +.+.: efi_rt_lock FD: 1 BD: 6 ....: (efivars_lock).lock 
FD: 1 BD: 1 +.+.: devfreq_list_lock FD: 1 BD: 2 +.+.: &entry->access FD: 51 BD: 2 +.+.: info_mutex ->proc_subdir_lock ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->proc_inum_ida.xa_lock FD: 1 BD: 92 +.+.: kobj_ns_type_lock FD: 13 BD: 57 +.+.: &xa->xa_lock#4 ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 31 BD: 51 +.+.: subsys mutex#20 ->&k->k_lock ->&rq->__lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 FD: 16 BD: 163 ..-.: &dir->lock#2 ->&obj_hash[i].lock ->pool_lock#2 ->&____s->seqcount ->quarantine_lock ->&meta->lock ->kfence_freelist_lock ->&base->lock FD: 35 BD: 57 +.+.: dev_hotplug_mutex ->&dev->power.lock ->&rq->__lock ->&k->k_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 14 BD: 56 ++++: dev_base_lock ->&xa->xa_lock#4 FD: 1 BD: 1 +.+.: qdisc_mod_lock FD: 18 BD: 1 ++++: bt_proto_lock ->pool_lock#2 ->&dir->lock ->&obj_hash[i].lock ->chan_list_lock ->l2cap_sk_list.lock ->&sk->sk_peer_lock ->hci_sk_list.lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->&____s->seqcount#2 FD: 60 BD: 22 +.+.: hci_cb_list_lock ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->chan_list_lock ->&conn->ident_lock ->&base->lock ->&c->lock ->&list->lock#8 ->&conn->chan_lock ->&n->list_lock ->&____s->seqcount ->pool_lock ->(work_completion)(&(&conn->id_addr_timer)->work) ->&x->wait#3 ->&rq->__lock ->(work_completion)(&(&conn->info_timer)->work) ->remove_cache_srcu ->hci_cb_list_lock.wait_lock ->&cfs_rq->removed.lock ->&____s->seqcount#2 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 1 BD: 1 +.+.: mgmt_chan_list_lock FD: 1 BD: 71 ....: &list->lock#2 FD: 49 BD: 49 +.+.: rate_ctrl_mutex ->fs_reclaim ->pool_lock#2 FD: 2 BD: 1 +.+.: netlbl_domhsh_lock ->pool_lock#2 FD: 1 BD: 47 +.+.: netlbl_unlhsh_lock FD: 325 BD: 1 +.+.: misc_mtx ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&____s->seqcount ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#21 ->misc_minors_ida.xa_lock ->&cfs_rq->removed.lock ->pool_lock ->&base->lock ->&dir->lock ->rfkill_global_mutex ->nfc_index_ida.xa_lock ->pcpu_alloc_mutex ->cpu_hotplug_lock ->kthread_create_lock ->&x->wait ->wq_pool_mutex ->misc_mtx.wait_lock ->hrtimer_bases.lock ->nfc_devlist_mutex ->&k->k_lock ->llcp_devices_lock ->nl_table_lock ->nl_table_wait.lock ->&____s->seqcount#2 ->&n->list_lock ->stock_lock ->remove_cache_srcu ->quarantine_lock ->key ->pcpu_lock ->percpu_counters_lock ->nfc_devlist_mutex.wait_lock ->rcu_node_0 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&rcu_state.expedited_wq ->pcpu_alloc_mutex.wait_lock ->&meta->lock ->(console_sem).lock ->&wq->mutex ->qrtr_node_lock ->wq_mayday_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->wq_pool_mutex.wait_lock ->(wq_completion)nfc4_nci_tx_wq#92 ->(wq_completion)nfc4_nci_rx_wq#93 ->(wq_completion)nfc4_nci_cmd_wq#96 ->(wq_completion)nfc3_nci_rx_wq#341 ->(wq_completion)nfc3_nci_cmd_wq#346 ->(wq_completion)nfc17_nci_cmd_wq#6 ->(wq_completion)nfc3_nci_cmd_wq#382 ->(wq_completion)nfc2_nci_rx_wq#973 ->(wq_completion)nfc2_nci_cmd_wq#973 ->(wq_completion)nfc4_nci_rx_wq#126 ->(wq_completion)nfc4_nci_cmd_wq#129 ->(wq_completion)nfc3_nci_tx_wq#412 ->(wq_completion)nfc3_nci_rx_wq#414 ->(wq_completion)nfc3_nci_cmd_wq#420 ->(wq_completion)nfc5_nci_cmd_wq#24 
->(wq_completion)nfc3_nci_tx_wq#496 ->(wq_completion)nfc3_nci_rx_wq#498 ->(wq_completion)nfc3_nci_cmd_wq#504 ->(wq_completion)nfc5_nci_cmd_wq#28 ->(wq_completion)nfc5_nci_tx_wq#31 ->(wq_completion)nfc5_nci_rx_wq#31 ->(wq_completion)nfc5_nci_cmd_wq#33 ->(wq_completion)nfc5_nci_cmd_wq#38 ->(wq_completion)nfc5_nci_cmd_wq#39 ->(wq_completion)nfc3_nci_tx_wq#550 ->(wq_completion)nfc3_nci_rx_wq#552 ->(wq_completion)nfc3_nci_cmd_wq#558 ->(wq_completion)nfc3_nci_rx_wq#575 ->(wq_completion)nfc3_nci_cmd_wq#581 ->(wq_completion)nfc2_nci_tx_wq#1296 ->(wq_completion)nfc2_nci_rx_wq#1295 ->(wq_completion)nfc2_nci_cmd_wq#1295 ->(wq_completion)nfc3_nci_cmd_wq#586 ->(wq_completion)nfc2_nci_rx_wq#1329 ->(wq_completion)nfc2_nci_cmd_wq#1329 ->(wq_completion)nfc2_nci_cmd_wq#1334 ->(wq_completion)nfc3_nci_rx_wq#621 ->(wq_completion)nfc3_nci_cmd_wq#628 ->(wq_completion)nfc5_nci_cmd_wq#59 ->(wq_completion)nfc13_nci_cmd_wq#18 ->(wq_completion)nfc21_nci_tx_wq#9 ->(wq_completion)nfc21_nci_rx_wq#9 ->(wq_completion)nfc21_nci_cmd_wq#9 ->(wq_completion)nfc21_nci_cmd_wq#10 ->(wq_completion)nfc4_nci_cmd_wq#271 ->(wq_completion)nfc3_nci_cmd_wq#728 ->(wq_completion)nfc3_nci_cmd_wq#756 ->(wq_completion)nfc6_nci_tx_wq#40 ->(wq_completion)nfc6_nci_rx_wq#40 ->(wq_completion)nfc6_nci_cmd_wq#40 ->(wq_completion)nfc2_nci_cmd_wq#1527 ->(wq_completion)nfc5_nci_cmd_wq#85 ->(wq_completion)nfc6_nci_cmd_wq#43 ->(wq_completion)nfc5_nci_cmd_wq#91 ->(wq_completion)nfc3_nci_cmd_wq#778 FD: 3 BD: 2 +.+.: subsys mutex#21 ->&k->k_lock FD: 122 BD: 2 +.+.: (work_completion)(&rfkill_global_led_trigger_work) ->rfkill_global_mutex ->rfkill_global_mutex.wait_lock ->&p->pi_lock ->&rq->__lock FD: 121 BD: 13 +.+.: rfkill_global_mutex ->fs_reclaim ->pool_lock#2 ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->&rfkill->lock ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#40 ->triggers_list_lock ->leds_list_lock ->&rq->__lock ->rfkill_global_mutex.wait_lock ->&n->list_lock ->&data->mtx ->&____s->seqcount#2 ->quarantine_lock ->&cfs_rq->removed.lock ->&sem->wait_lock ->&p->pi_lock ->rcu_node_0 ->remove_cache_srcu ->key ->pcpu_lock ->percpu_counters_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->uevent_sock_mutex.wait_lock ->&rcu_state.expedited_wq ->stock_lock FD: 124 BD: 1 +.+.: input_mutex ->input_devices_poll_wait.lock ->fs_reclaim ->pool_lock#2 ->&dev->mutex#2 ->input_ida.xa_lock ->&x->wait#9 ->&obj_hash[i].lock ->chrdevs_lock ->&k->list_lock ->&c->lock ->&____s->seqcount ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#31 FD: 1 BD: 2 ....: input_devices_poll_wait.lock FD: 46 BD: 2 ++++: (netlink_chain).rwsem ->hwsim_radio_lock ->pool_lock#2 ->&obj_hash[i].lock ->reg_indoor_lock ->&rq->__lock ->&c->lock ->&n->list_lock ->rcu_node_0 ->batched_entropy_u8.lock ->kfence_freelist_lock ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock ->&____s->seqcount#2 ->&____s->seqcount ->stock_lock FD: 1 BD: 1 +.+.: proto_tab_lock FD: 3 BD: 1 ....: random_ready_notifier.lock ->crngs.lock FD: 1 BD: 2 ....: misc_minors_ida.xa_lock FD: 1 BD: 1 +.+.: wtd_deferred_reg_mutex FD: 75 BD: 1 +.+.: &type->s_umount_key#12/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock 
->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#11 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 37 BD: 2 +.+.: &sb->s_type->i_lock_key#11 ->&dentry->d_lock FD: 94 BD: 1 +.+.: (work_completion)(&tracerfs_init_work) ->pin_fs_lock ->fs_reclaim ->pool_lock#2 ->sb_lock ->&c->lock ->&____s->seqcount ->&type->s_umount_key#13/1 ->&type->s_umount_key#14 ->mnt_id_ida.xa_lock ->pcpu_alloc_mutex ->&dentry->d_lock ->mount_lock ->&obj_hash[i].lock ->&fsnotify_mark_srcu ->&sb->s_type->i_mutex_key#5 ->event_mutex ->(module_notify_list).rwsem ->trace_types_lock FD: 79 BD: 2 +.+.: &type->s_umount_key#13/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&sb->s_type->i_lock_key#12 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock ->bit_wait_table + i ->&type->s_umount_key#14 FD: 37 BD: 6 +.+.: &sb->s_type->i_lock_key#12 ->&dentry->d_lock FD: 26 BD: 286 -.-.: bit_wait_table + i ->&p->pi_lock FD: 66 BD: 3 +.+.: &type->s_umount_key#14 ->sb_lock ->list_lrus_mutex ->&xa->xa_lock#5 ->&obj_hash[i].lock ->pool_lock#2 ->shrinker_mutex ->&rsp->gp_wait ->pcpu_lock ->fs_reclaim ->&dentry->d_lock ->&lru->node[i].lock FD: 10 BD: 292 ....: &xa->xa_lock#5 ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->pool_lock#2 ->&n->list_lock FD: 63 BD: 4 +.+.: &sb->s_type->i_mutex_key#5 ->&sb->s_type->i_lock_key#12 ->rename_lock.seqcount ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->&____s->seqcount ->&c->lock ->mmu_notifier_invalidate_range_start ->&s->s_inode_list_lock ->tk_core.seq.seqcount FD: 4 BD: 10 ..-.: &rsp->gp_wait ->&obj_hash[i].lock ->pool_lock#2 FD: 75 BD: 1 +.+.: &type->s_umount_key#15/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#13 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 52 BD: 120 .+.+: &fsnotify_mark_srcu ->&conn->lock ->fs_reclaim ->pool_lock#2 ->&group->notification_lock ->&group->notification_waitq ->&____s->seqcount ->&c->lock ->&rq->__lock ->&obj_hash[i].lock ->remove_cache_srcu ->&cfs_rq->removed.lock ->&n->list_lock ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->&____s->seqcount#2 ->rcu_node_0 FD: 37 BD: 2 +.+.: &sb->s_type->i_lock_key#13 ->&dentry->d_lock FD: 69 BD: 2 +.+.: event_mutex ->pin_fs_lock ->&sb->s_type->i_mutex_key#5 ->trace_event_sem ->trace_types_lock FD: 75 BD: 1 +.+.: &type->s_umount_key#16/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&c->lock ->&____s->seqcount ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#14 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 37 BD: 2 +.+.: &sb->s_type->i_lock_key#14 ->&dentry->d_lock FD: 1 BD: 5 +.+.: eventfs_mutex FD: 75 BD: 1 +.+.: &type->s_umount_key#17/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#15 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 37 BD: 2 +.+.: &sb->s_type->i_lock_key#15 ->&dentry->d_lock FD: 227 BD: 2 +.+.: timer_update_work ->timer_keys_mutex FD: 226 BD: 3 +.+.: timer_keys_mutex ->cpu_hotplug_lock FD: 50 BD: 1 +.+.: kclist_lock ->resource_lock ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount FD: 75 BD: 1 +.+.: &type->s_umount_key#18/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->&c->lock 
->&____s->seqcount ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#16 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 37 BD: 2 +.+.: &sb->s_type->i_lock_key#16 ->&dentry->d_lock FD: 179 BD: 31 .+.+: tomoyo_ss ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->tomoyo_policy_lock ->(console_sem).lock ->&obj_hash[i].lock ->&dentry->d_lock ->tomoyo_log_lock ->tomoyo_log_wait.lock ->&c->lock ->&____s->seqcount ->&rq->__lock ->file_systems_lock ->fs_reclaim ->&mm->mmap_lock ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->remove_cache_srcu ->&n->list_lock ->rcu_node_0 ->rename_lock ->&cfs_rq->removed.lock ->&____s->seqcount#2 ->mount_lock ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->&rcu_state.expedited_wq ->&base->lock ->&fs->lock FD: 49 BD: 1 +.+.: pnp_lock ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: subsys mutex#22 FD: 3 BD: 1 +.+.: subsys mutex#23 ->&k->k_lock FD: 3 BD: 1 +.+.: subsys mutex#24 ->&k->k_lock FD: 4 BD: 1 +.+.: subsys mutex#25 ->&k->list_lock ->&k->k_lock FD: 1 BD: 1 ....: netevent_notif_chain.lock FD: 50 BD: 1 +.+.: clients_rwsem ->fs_reclaim ->clients.xa_lock FD: 2 BD: 2 +.+.: clients.xa_lock ->pool_lock#2 FD: 29 BD: 8 .+.+: devices_rwsem ->rcu_node_0 ->&rq->__lock ->&rcu_state.expedited_wq FD: 1 BD: 1 +.+.: (blocking_lsm_notifier_chain).rwsem FD: 114 BD: 47 ++++: (inetaddr_chain).rwsem ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->fib_info_lock ->&dir->lock#2 ->&____s->seqcount ->&c->lock ->nl_table_lock ->&obj_hash[i].lock ->nl_table_wait.lock ->&net->sctp.local_addr_lock ->rlock-AF_NETLINK ->&rq->__lock ->&n->list_lock ->rcu_node_0 ->remove_cache_srcu ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock ->quarantine_lock ->&ipvlan->addrs_lock ->&____s->seqcount#2 ->&tbl->lock ->class ->(&tbl->proxy_timer) ->&base->lock ->krc.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->pcpu_alloc_mutex.wait_lock ->&p->pi_lock ->mmu_notifier_invalidate_range_start ->&meta->lock ->stock_lock FD: 1 BD: 8 ....: inet6addr_chain.lock FD: 1 BD: 1 +.+.: buses_mutex FD: 1 BD: 1 +.+.: offload_lock FD: 1 BD: 1 +...: inetsw_lock FD: 58 BD: 2 +.+.: pcpu_balance_work ->pcpu_alloc_mutex ->pcpu_alloc_mutex.wait_lock ->&p->pi_lock FD: 1 BD: 49 +.+.: ptype_lock FD: 724 BD: 1 +.+.: (wq_completion)events_power_efficient ->(work_completion)(&(&tbl->managed_work)->work) ->(check_lifetime_work).work ->(work_completion)(&(&cache_cleaner)->work) ->(work_completion)(&(&ops->cursor_work)->work) ->(work_completion)(&(&hub->init_work)->work) ->(work_completion)(&(&gc_work->dwork)->work) ->(work_completion)(&(&tbl->gc_work)->work) ->(gc_work).work ->(crda_timeout).work ->&rq->__lock ->(reg_check_chans).work FD: 43 BD: 2 +.+.: (work_completion)(&(&tbl->managed_work)->work) ->&tbl->lock ->&rq->__lock FD: 42 BD: 115 +.-.: &tbl->lock ->&obj_hash[i].lock ->&base->lock ->&n->lock ->&c->lock ->&____s->seqcount ->pool_lock#2 ->nl_table_lock ->rlock-AF_NETLINK ->nl_table_wait.lock ->&dir->lock#2 ->krc.lock ->batched_entropy_u32.lock ->&____s->seqcount#2 ->&n->list_lock ->tk_core.seq.seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->quarantine_lock FD: 29 BD: 2 +.+.: (check_lifetime_work).work ->&obj_hash[i].lock ->&base->lock ->rcu_node_0 ->&rq->__lock ->&cfs_rq->removed.lock ->pool_lock#2 FD: 1 BD: 47 +.+.: &net->rules_mod_lock FD: 1 BD: 1 +.+.: tcp_ulp_list_lock FD: 1 BD: 1 +...: xfrm_state_afinfo_lock FD: 1 BD: 1 +.+.: xfrm_policy_afinfo_lock FD: 1 BD: 1 +...: 
xfrm_input_afinfo_lock FD: 16 BD: 541 ..-.: krc.lock ->&obj_hash[i].lock ->hrtimer_bases.lock ->&base->lock FD: 1 BD: 48 +...: k-slock-AF_INET/1 FD: 55 BD: 1 +.+.: (wq_completion)events_highpri ->(work_completion)(&(&krcp->page_cache_work)->work) ->(work_completion)(flush) ->(work_completion)(&barr->work) FD: 50 BD: 2 +.+.: (work_completion)(&(&krcp->page_cache_work)->work) ->fs_reclaim ->&____s->seqcount ->krc.lock FD: 1 BD: 2 +.+.: &hashinfo->lock FD: 1 BD: 1 +.+.: tcp_cong_list_lock FD: 1 BD: 1 +.+.: mptcp_sched_list_lock FD: 2 BD: 7 +.+.: cache_list_lock ->&cd->hash_lock FD: 27 BD: 2 +.+.: (work_completion)(&(&cache_cleaner)->work) ->cache_list_lock ->&obj_hash[i].lock ->&base->lock ->&rq->__lock FD: 1 BD: 1 +.+.: (rpc_pipefs_notifier_list).rwsem FD: 1 BD: 1 +.+.: svc_xprt_class_lock FD: 39 BD: 1 +.+.: xprt_list_lock ->(console_sem).lock FD: 131 BD: 3 ++++: umhelper_sem ->usermodehelper_disabled_waitq.lock ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->&k->k_lock ->subsys mutex#73 ->fw_lock ->&c->lock ->&____s->seqcount ->uevent_sock_mutex ->running_helpers_waitq.lock ->&x->wait#22 ->&base->lock ->&rq->__lock ->(&timer.timer) ->dev_pm_qos_sysfs_mtx ->kernfs_idr_lock ->deferred_probe_mutex ->device_links_lock ->mmu_notifier_invalidate_range_start FD: 1 BD: 4 ....: usermodehelper_disabled_waitq.lock FD: 27 BD: 387 +.+.: &dentry->d_lock/1 ->&lru->node[i].lock FD: 93 BD: 3 .+.+: sb_writers#2 ->mount_lock ->&sb->s_type->i_mutex_key/1 ->&sb->s_type->i_mutex_key FD: 89 BD: 4 +.+.: &sb->s_type->i_mutex_key/1 ->rename_lock.seqcount ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->&dentry->d_lock ->&obj_hash[i].lock ->tomoyo_ss ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#2 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&sb->s_type->i_mutex_key FD: 1 BD: 2 +.+.: tomoyo_log_lock FD: 1 BD: 2 ....: tomoyo_log_wait.lock FD: 55 BD: 129 +.+.: &wb->list_lock ->&sb->s_type->i_lock_key#2 ->&sb->s_type->i_lock_key#23 ->&sb->s_type->i_lock_key#22 ->&sb->s_type->i_lock_key ->&sb->s_type->i_lock_key#5 ->&sb->s_type->i_lock_key#24 ->&sb->s_type->i_lock_key#3 ->&sb->s_type->i_lock_key#27 FD: 139 BD: 2 +.+.: (work_completion)(&sub_info->work) ->&sighand->siglock ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->init_mm.page_table_lock ->batched_entropy_u64.lock ->&obj_hash[i].lock ->init_files.file_lock ->init_fs.lock ->&p->alloc_lock ->lock ->pidmap_lock ->cgroup_threadgroup_rwsem ->input_pool.lock ->&p->pi_lock ->&c->lock ->&rq->__lock ->&cfs_rq->removed.lock ->&n->list_lock ->rcu_node_0 ->&sig->wait_chldexit ->tasklist_lock ->&prev->lock ->&(&sig->stats_lock)->lock ->css_set_lock ->&x->wait#17 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&____s->seqcount#2 ->remove_cache_srcu ->&rcu_state.expedited_wq ->&meta->lock FD: 1 BD: 3 +.+.: &drv->dynids.lock FD: 1 BD: 1 +.+.: umh_sysctl_lock FD: 165 BD: 2 +.+.: &tsk->futex_exit_mutex ->&p->pi_lock ->&mm->mmap_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 28 BD: 1 +.+.: &child->perf_event_mutex ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 96 ....: &pid->wait_pidfd FD: 26 BD: 101 ....: &sig->wait_chldexit ->&p->pi_lock FD: 15 BD: 101 ....: &(&sig->stats_lock)->lock ->&____s->seqcount#4 FD: 14 BD: 103 
....: &____s->seqcount#4 ->pidmap_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 1 +.+.: subsys mutex#26 FD: 143 BD: 1 +.+.: subsys mutex#27 ->&k->list_lock ->&k->k_lock ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->platform_devid_ida.xa_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->&(&priv->bus_notifier)->rwsem ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#4 ->&rq->__lock ->wakeup_ida.xa_lock ->gdp_mutex ->subsys mutex#13 ->events_lock ->rtcdev_lock FD: 1 BD: 1 +.+.: subsys mutex#28 FD: 1 BD: 92 +.+.: subsys mutex#29 FD: 1 BD: 4 +.+.: key_user_lock FD: 1 BD: 4 +.+.: key_serial_lock FD: 5 BD: 5 +.+.: key_construction_mutex ->&obj_hash[i].lock ->pool_lock#2 ->keyring_name_lock FD: 56 BD: 3 +.+.: &type->lock_class ->keyring_serialise_link_lock ->fs_reclaim ->pool_lock#2 ->key_user_lock ->&____s->seqcount ->&c->lock ->crngs.lock ->key_serial_lock ->key_construction_mutex ->ima_keys_lock FD: 52 BD: 4 +.+.: keyring_serialise_link_lock ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->root_key_user.lock ->key_construction_mutex FD: 1 BD: 1 +.+.: drivers_lock FD: 92 BD: 1 +.+.: damon_dbgfs_lock ->fs_reclaim ->pool_lock#2 ->damon_ops_lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 FD: 1 BD: 1 ....: &pgdat->kswapd_wait FD: 75 BD: 1 +.+.: &type->s_umount_key#19/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#17 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 37 BD: 2 +.+.: &sb->s_type->i_lock_key#17 ->&dentry->d_lock FD: 75 BD: 1 +.+.: &type->s_umount_key#20/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 37 BD: 8 +.+.: &sb->s_type->i_lock_key#18 ->&dentry->d_lock FD: 1 BD: 1 +.+.: configfs_subsystem_mutex FD: 71 BD: 1 +.+.: &sb->s_type->i_mutex_key#6/1 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->&____s->seqcount ->&c->lock ->configfs_dirent_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&default_group_class[depth - 1]/2 ->&sb->s_type->i_mutex_key#7/2 ->&default_group_class[depth - 1]#3 FD: 1 BD: 9 +.+.: configfs_dirent_lock FD: 68 BD: 2 +.+.: &default_group_class[depth - 1]/2 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->configfs_dirent_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&default_group_class[depth - 1]#4/2 ->&c->lock ->&____s->seqcount FD: 49 BD: 1 +.+.: ecryptfs_daemon_hash_mux ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 ....: &ecryptfs_kthread_ctl.wait FD: 2 BD: 1 +.+.: ecryptfs_msg_ctx_lists_mux ->&ecryptfs_msg_ctx_arr[i].mux FD: 1 BD: 2 +.+.: &ecryptfs_msg_ctx_arr[i].mux FD: 1 BD: 1 +.+.: nfs_version_lock FD: 77 BD: 1 ++++: key_types_sem ->(console_sem).lock ->asymmetric_key_parsers_sem ->&type->lock_class ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 1 +.+.: pnfs_spinlock FD: 1 BD: 5 +.+.: &sn->pipefs_sb_lock FD: 1 BD: 1 +.+.: nls_lock FD: 38 BD: 1 +...: put_task_map-wait-type-override ->&obj_hash[i].lock ->pool_lock#2 ->quarantine_lock ->&base->lock ->task_group_lock ->stock_lock ->&meta->lock ->kfence_freelist_lock ->css_set_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) 
FD: 30 BD: 1 ..-.: &(&cache_cleaner)->timer FD: 1 BD: 1 +.+.: jffs2_compressor_list_lock FD: 1 BD: 1 +.+.: next_tag_value_lock FD: 13 BD: 1 +.-.: (&tcp_orphan_timer) ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 1 ....: log_redrive_lock FD: 2 BD: 1 ....: &TxAnchor.LazyLock ->jfs_commit_thread_wait.lock FD: 1 BD: 2 ....: jfs_commit_thread_wait.lock FD: 1 BD: 1 +.+.: jfsTxnLock FD: 39 BD: 1 +.+.: ocfs2_stack_lock ->(console_sem).lock FD: 1 BD: 1 +.+.: o2hb_callback_sem FD: 1 BD: 1 +.+.: o2net_handler_lock FD: 3 BD: 1 +.+.: subsys mutex#30 ->&k->k_lock FD: 75 BD: 1 +.+.: &type->s_umount_key#21/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&____s->seqcount ->&c->lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#19 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock ->&n->list_lock ->&rq->__lock FD: 37 BD: 5 +.+.: &sb->s_type->i_lock_key#19 ->&dentry->d_lock FD: 75 BD: 1 +.+.: &type->s_umount_key#22/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#20 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock ->&c->lock ->&____s->seqcount FD: 37 BD: 2 +.+.: &sb->s_type->i_lock_key#20 ->&dentry->d_lock FD: 1 BD: 1 +.+.: cipso_v4_doi_list_lock FD: 223 BD: 53 +.+.: nf_hook_mutex ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount ->stock_lock ->&____s->seqcount#2 ->&rq->__lock ->nf_hook_mutex.wait_lock ->cpu_hotplug_lock ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->remove_cache_srcu ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 49 BD: 1 +.+.: alg_types_sem ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: dma_list_mutex FD: 68 BD: 2 ++++: asymmetric_key_parsers_sem ->(console_sem).lock ->fs_reclaim ->pool_lock#2 ->crypto_alg_sem ->&obj_hash[i].lock ->(crypto_chain).rwsem ->&x->wait#20 ->&base->lock ->&rq->__lock ->(&timer.timer) ->&c->lock ->&____s->seqcount FD: 711 BD: 1 +.+.: blkcg_pol_register_mutex ->blkcg_pol_mutex ->cgroup_mutex FD: 1 BD: 2 +.+.: elv_list_lock FD: 53 BD: 1 +.+.: crc_t10dif_mutex ->crypto_alg_sem ->fs_reclaim ->pool_lock#2 FD: 53 BD: 1 +.+.: crc64_rocksoft_mutex ->crypto_alg_sem ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: ts_mod_lock FD: 1 BD: 1 +.+.: pci_ep_cfs_subsys.su_mutex FD: 64 BD: 1 +.+.: &default_group_class[depth - 1]#2/1 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->configfs_dirent_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&sb->s_type->i_mutex_key#7/2 FD: 1 BD: 3 +.+.: &sb->s_type->i_mutex_key#7/2 FD: 1 BD: 1 +.+.: pci_epf_mutex FD: 227 BD: 1 +.+.: ipmi_interfaces_mutex ->&k->list_lock ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&k->k_lock ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock ->pcpu_alloc_mutex ->cpu_hotplug_lock ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&cfs_rq->removed.lock ->wq_pool_mutex ->&base->lock ->panic_notifier_list.lock FD: 36 BD: 2 +.+.: (work_completion)(&p->wq) ->vmap_area_lock ->&obj_hash[i].lock ->purge_vmap_area_lock ->pool_lock#2 ->&rq->__lock ->&cfs_rq->removed.lock ->quarantine_lock ->rcu_node_0 ->&base->lock ->&meta->lock ->kfence_freelist_lock ->&rcu_state.expedited_wq FD: 2 BD: 1 +.+.: smi_watchers_mutex ->&ipmi_interfaces_srcu FD: 1 BD: 3 .+.?: &ipmi_interfaces_srcu FD: 1 BD: 1 +.+.: smi_infos_lock FD: 4 BD: 108 ....: mask_lock ->tmp_mask_lock FD: 3 BD: 109 ....: tmp_mask_lock 
->tmpmask_lock ->&its->lock FD: 26 BD: 1 ....: &desc->wait_for_threads ->&p->pi_lock FD: 3 BD: 2 +.+.: subsys mutex#31 ->&k->k_lock FD: 27 BD: 3 +.+.: &dev->mutex#2 ->&obj_hash[i].lock ->&x->wait#3 ->&rq->__lock FD: 109 BD: 1 +.+.: register_count_mutex ->&k->list_lock ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount ->lock ->&root->kernfs_rwsem ->&k->k_lock ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock FD: 1 BD: 1 ....: thermal_cdev_ida.xa_lock FD: 1 BD: 1 ....: cpufreq_driver_lock FD: 3 BD: 1 +.+.: subsys mutex#32 ->&k->k_lock FD: 49 BD: 1 +.+.: scmi_requested_devices_mtx ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount FD: 1 BD: 1 ....: virtio_index_ida.xa_lock FD: 1 BD: 1 +.+.: subsys mutex#33 FD: 1 BD: 1 +.+.: vdpa_dev_lock FD: 85 BD: 2 ++++: &type->i_mutex_dir_key#2 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->rename_lock.seqcount ->&c->lock ->&____s->seqcount ->namespace_sem ->&sem->wait_lock ->&rq->__lock ->&n->list_lock ->&xa->xa_lock#5 ->&obj_hash[i].lock ->stock_lock ->rcu_node_0 ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->remove_cache_srcu ->key ->pcpu_lock ->percpu_counters_lock ->tomoyo_ss ->&sbinfo->stat_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#5 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&xattrs->lock ->&simple_offset_xa_lock ->smack_known_lock ->&rcu_state.gp_wq ->&rcu_state.expedited_wq ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 3 BD: 1 +.+.: subsys mutex#34 ->&k->k_lock FD: 30 BD: 1 ..-.: lib/debugobjects.c:101 FD: 27 BD: 2 +.+.: (debug_obj_work).work ->pool_lock#2 ->&rq->__lock ->quarantine_lock ->&meta->lock ->kfence_freelist_lock FD: 25 BD: 9 +.+.: (work_completion)(&buf->work) ->&rq->__lock FD: 1 BD: 1 ....: rng_index_ida.xa_lock FD: 123 BD: 4 +.+.: &md->mutex ->pci_lock ->fs_reclaim ->pool_lock#2 ->&xa->xa_lock#6 ->&its->dev_alloc_lock ->&domain->mutex ->&irq_desc_lock_class ->tmpmask_lock ->&its->lock ->&root->kernfs_rwsem ->lock ->&obj_hash[i].lock ->sparse_irq_lock ->vmap_area_lock ->purge_vmap_area_lock ->&c->lock ->&____s->seqcount FD: 8 BD: 5 +.+.: &xa->xa_lock#6 ->&c->lock ->&____s->seqcount ->pool_lock#2 FD: 51 BD: 8 +.+.: &its->dev_alloc_lock ->&its->lock ->fs_reclaim ->&zone->lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->&____s->seqcount ->lpi_range_lock ->&c->lock FD: 1 BD: 110 ....: tmpmask_lock FD: 1 BD: 1 +.+.: &dev->vqs_list_lock FD: 1 BD: 1 ....: &vp_dev->lock FD: 52 BD: 1 +.+.: rng_mutex ->&x->wait#13 ->fs_reclaim ->pool_lock#2 ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&obj_hash[i].lock FD: 1 BD: 2 ....: &x->wait#13 FD: 26 BD: 2 -...: &x->wait#14 ->&p->pi_lock FD: 27 BD: 1 +.+.: reading_mutex ->&x->wait#14 ->&rq->__lock FD: 1 BD: 1 ....: &dev->config_lock FD: 2 BD: 1 +.-.: drivers/char/random.c:1010 ->input_pool.lock FD: 1 BD: 1 ....: &dev->managed.lock FD: 75 BD: 1 +.+.: &type->s_umount_key#23/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#21 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 37 BD: 2 +.+.: &sb->s_type->i_lock_key#21 ->&dentry->d_lock FD: 2 BD: 190 ....: drm_minor_lock ->pool_lock#2 FD: 3 BD: 3 +.+.: subsys mutex#35 ->&k->k_lock FD: 166 BD: 16 +.+.: &dev->mode_config.idr_mutex ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&file->master_lookup_lock ->&mm->mmap_lock ->&rq->__lock ->rcu_node_0 
->&rcu_state.expedited_wq ->&c->lock ->&cfs_rq->removed.lock FD: 179 BD: 12 +.+.: crtc_ww_class_acquire ->crtc_ww_class_mutex ->fs_reclaim ->pool_lock#2 ->&c->lock ->remove_cache_srcu ->&n->list_lock ->&rq->__lock ->&____s->seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock ->&____s->seqcount#2 ->&lock->wait_lock ->&p->pi_lock FD: 179 BD: 13 +.+.: crtc_ww_class_mutex ->reservation_ww_class_acquire ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->&dev->mode_config.idr_mutex ->&dev->mode_config.blob_lock ->&crtc->commit_lock ->reservation_ww_class_mutex ->tk_core.seq.seqcount ->&vkms_out->lock ->&dev->vbl_lock ->&x->wait#15 ->(work_completion)(&vkms_state->composer_work) ->&base->lock ->&rq->__lock ->(&timer.timer) ->(work_completion)(&vkms_state->composer_work)#2 ->&n->list_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->pool_lock ->key ->pcpu_lock ->percpu_counters_lock ->remove_cache_srcu ->&mm->mmap_lock ->&cfs_rq->removed.lock ->&____s->seqcount#2 ->&meta->lock ->kfence_freelist_lock ->stock_lock ->&lock->wait_lock ->batched_entropy_u8.lock ->quarantine_lock ->&file->master_lookup_lock ->&p->pi_lock FD: 1 BD: 14 +.+.: &dev->mode_config.blob_lock FD: 1 BD: 1 ....: &xa->xa_lock#7 FD: 1 BD: 1 ....: &xa->xa_lock#8 FD: 1 BD: 15 ....: &dev->mode_config.connector_list_lock FD: 18 BD: 17 ..-.: &dev->vbl_lock ->&dev->vblank_time_lock FD: 212 BD: 1 .+.+: drm_connector_list_iter ->&dev->mode_config.connector_list_lock ->fs_reclaim ->pool_lock#2 ->&connector->mutex ->&file->master_lookup_lock ->&mm->mmap_lock ->&c->lock ->&n->list_lock ->&rq->__lock ->&____s->seqcount#2 ->&____s->seqcount ->rcu_node_0 FD: 210 BD: 2 +.+.: &connector->mutex ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#35 ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->&dev->mode_config.idr_mutex ->connector_list_lock FD: 1 BD: 3 +.+.: connector_list_lock FD: 28 BD: 1 +.+.: &dev->filelist_mutex ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 245 BD: 1 +.+.: &dev->clientlist_mutex ->&helper->lock ->registration_lock ->(console_sem).lock ->kernel_fb_helper_lock ->&lock->wait_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 206 BD: 8 +.+.: &helper->lock ->fs_reclaim ->pool_lock#2 ->&client->modeset_mutex ->&obj_hash[i].lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&____s->seqcount ->&mgr->vm_lock ->&dev->object_name_lock ->&node->vm_lock ->&file_private->table_lock ->&dev->mode_config.idr_mutex ->&dev->mode_config.fb_lock ->&file->fbs_lock ->&prime_fpriv->lock ->free_vmap_area_lock ->vmap_area_lock ->init_mm.page_table_lock ->&c->lock ->&dev->master_mutex ->reservation_ww_class_mutex ->&lock->wait_lock ->&rq->__lock ->&p->pi_lock ->&cfs_rq->removed.lock FD: 183 BD: 10 +.+.: &client->modeset_mutex ->&dev->mode_config.mutex ->fs_reclaim ->pool_lock#2 ->crtc_ww_class_acquire ->&rq->__lock FD: 182 BD: 11 +.+.: &dev->mode_config.mutex ->crtc_ww_class_acquire ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&rq->__lock ->&lock->wait_lock ->&mm->mmap_lock ->&cfs_rq->removed.lock FD: 2 BD: 9 +.+.: &mgr->vm_lock ->pool_lock#2 FD: 40 BD: 9 +.+.: &dev->object_name_lock ->lock FD: 4 BD: 190 +.+.: &file_private->table_lock ->pool_lock#2 
->&obj_hash[i].lock FD: 4 BD: 9 +.+.: &node->vm_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 9 +.+.: &dev->mode_config.fb_lock FD: 25 BD: 9 +.+.: &file->fbs_lock ->&rq->__lock FD: 1 BD: 9 +.+.: &prime_fpriv->lock FD: 243 BD: 2 +.+.: registration_lock ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&rq->__lock ->&x->wait#11 ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#11 ->vt_switch_mutex ->(console_sem).lock ->console_lock FD: 49 BD: 3 +.+.: vt_switch_mutex ->fs_reclaim ->pool_lock#2 FD: 1 BD: 6 +.+.: &fb_info->lock FD: 1 BD: 6 ....: vt_event_lock FD: 192 BD: 9 +.+.: &dev->master_mutex ->&client->modeset_mutex ->fs_reclaim ->&c->lock ->pool_lock#2 ->&file->master_lookup_lock ->&obj_hash[i].lock ->&rq->__lock ->&n->list_lock ->&dev->mode_config.idr_mutex ->&lock->wait_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock ->quarantine_lock ->uevent_sock_mutex ->&____s->seqcount#2 ->&____s->seqcount ->remove_cache_srcu FD: 1 BD: 14 +.+.: &crtc->commit_lock FD: 22 BD: 283 -...: &xa->xa_lock#9 ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->key#11 ->&s->s_inode_wblist_lock ->&base->lock ->key#12 ->&wb->work_lock ->&n->list_lock ->stock_lock ->&xa->xa_lock#5 ->key#13 ->&pl->lock FD: 1 BD: 102 +.+.: &info->lock FD: 15 BD: 546 +.+.: lock#4 ->&lruvec->lru_lock ->&obj_hash[i].lock ->&base->lock FD: 2 BD: 547 ....: &lruvec->lru_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 33 BD: 14 -.-.: &vkms_out->lock ->&dev->event_lock FD: 32 BD: 15 -.-.: &dev->event_lock ->&dev->vbl_lock ->&____s->seqcount#5 ->&x->wait#15 ->&obj_hash[i].lock ->pool_lock#2 ->&dev->vblank_time_lock ->&vblank->queue ->&base->lock FD: 1 BD: 20 ----: &____s->seqcount#5 FD: 26 BD: 16 -.-.: &x->wait#15 ->&p->pi_lock FD: 17 BD: 18 -.-.: &dev->vblank_time_lock ->tk_core.seq.seqcount ->&(&vblank->seqlock)->lock ->&obj_hash[i].lock ->hrtimer_bases.lock FD: 2 BD: 19 -.-.: &(&vblank->seqlock)->lock ->&____s->seqcount#5 FD: 1 BD: 14 +.+.: (work_completion)(&vkms_state->composer_work) FD: 1 BD: 10 ....: &helper->damage_lock FD: 208 BD: 2 +.+.: (work_completion)(&helper->damage_work) ->&helper->damage_lock ->&helper->lock FD: 1 BD: 16 -.-.: &vblank->queue FD: 1 BD: 14 +.+.: (work_completion)(&vkms_state->composer_work)#2 FD: 26 BD: 195 +.+.: &lock->wait_lock ->&p->pi_lock FD: 1 BD: 2 +.+.: kernel_fb_helper_lock FD: 1 BD: 1 +.+.: drivers_lock#2 FD: 1 BD: 1 +.+.: devices_lock FD: 1 BD: 10 ....: blk_queue_ida.xa_lock FD: 2 BD: 12 +.+.: &xa->xa_lock#10 ->pool_lock#2 FD: 32 BD: 242 ....: &q->queue_lock ->&blkcg->lock ->pool_lock#2 ->pcpu_lock ->&obj_hash[i].lock ->percpu_counters_lock ->&c->lock ->&____s->seqcount ->&n->list_lock FD: 28 BD: 243 ....: &blkcg->lock ->pool_lock#2 ->percpu_ref_switch_lock FD: 1 BD: 14 +.+.: &bdev->bd_size_lock FD: 3 BD: 12 +.+.: subsys mutex#36 ->&k->k_lock FD: 248 BD: 8 +.+.: &q->sysfs_dir_lock ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&c->lock ->&____s->seqcount ->&q->sysfs_lock ->&obj_hash[i].lock ->sysfs_symlink_target_lock ->&n->list_lock ->kernfs_idr_lock ->&____s->seqcount#2 ->&sem->wait_lock ->&p->pi_lock ->&rq->__lock FD: 247 BD: 9 +.+.: &q->sysfs_lock ->&q->debugfs_mutex ->&q->unused_hctx_lock ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->&obj_hash[i].lock ->cpu_hotplug_lock ->fs_reclaim 
->&xa->xa_lock#11 ->pcpu_alloc_mutex ->&q->rq_qos_mutex ->&stats->lock ->&rq->__lock ->&cfs_rq->removed.lock ->&c->lock ->&____s->seqcount ->lock ->&root->kernfs_rwsem ->&n->list_lock FD: 89 BD: 11 +.+.: &q->debugfs_mutex ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->&dentry->d_lock ->&fsnotify_mark_srcu ->&sb->s_type->i_lock_key#7 ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->&obj_hash[i].lock ->pool_lock#2 ->mount_lock ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 27 BD: 248 ..-.: percpu_ref_switch_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 3 BD: 8 +.+.: subsys mutex#37 ->&k->k_lock FD: 1 BD: 9 ....: cgwb_lock FD: 1 BD: 8 +...: bdi_lock FD: 53 BD: 246 +.+.: inode_hash_lock ->&sb->s_type->i_lock_key#3 ->&sb->s_type->i_lock_key#22 ->&s->s_inode_list_lock ->&sb->s_type->i_lock_key#24 ->&sb->s_type->i_lock_key#30 ->&sb->s_type->i_lock_key#31 FD: 2 BD: 10 +.+.: bdev_lock ->&bdev->bd_holder_lock FD: 139 BD: 9 +.+.: &disk->open_mutex ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->init_mm.page_table_lock ->&xa->xa_lock#9 ->lock#4 ->mmu_notifier_invalidate_range_start ->&c->lock ->&mapping->i_private_lock ->tk_core.seq.seqcount ->&ret->b_uptodate_lock ->&obj_hash[i].lock ->purge_vmap_area_lock ->&sb->s_type->i_lock_key#3 ->lock#5 ->&lruvec->lru_lock ->&folio_wait_table[i] ->&rq->__lock ->&cfs_rq->removed.lock ->(console_sem).lock ->console_owner_lock ->console_owner ->&s->s_inode_list_lock ->pcpu_alloc_mutex ->&bdev->bd_size_lock ->&x->wait#9 ->ext_devt_ida.xa_lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&k->k_lock ->subsys mutex#36 ->&xa->xa_lock#10 ->inode_hash_lock ->bdev_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&lo->lo_mutex ->nbd_index_mutex ->&nbd->config_lock ->&new->lock ->&lock->wait_lock FD: 1 BD: 121 +.+.: &mapping->i_private_lock FD: 1 BD: 12 -...: &ret->b_uptodate_lock FD: 1 BD: 542 +.+.: lock#5 FD: 49 BD: 2 +.+.: loop_ctl_mutex ->fs_reclaim ->pool_lock#2 ->&rq->__lock FD: 1 BD: 10 +.+.: &q->unused_hctx_lock FD: 2 BD: 12 +.+.: &xa->xa_lock#11 ->pool_lock#2 FD: 1 BD: 7 +.+.: &set->tag_list_lock FD: 28 BD: 11 +.+.: &q->mq_freeze_lock ->percpu_ref_switch_lock ->&q->mq_freeze_wq ->&rq->__lock FD: 26 BD: 249 ..-.: &q->mq_freeze_wq ->&p->pi_lock FD: 97 BD: 10 +.+.: &q->rq_qos_mutex ->&q->mq_freeze_lock ->percpu_ref_switch_lock ->&q->debugfs_mutex ->set->srcu ->&stats->lock ->(&cb->timer) ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 FD: 1 BD: 11 ....: &stats->lock FD: 62 BD: 10 +.+.: nbd_index_mutex ->fs_reclaim ->pool_lock#2 ->&nbd->config_lock FD: 1 BD: 17 .+.+: set->srcu FD: 30 BD: 3 +.+.: (work_completion)(&(&q->requeue_work)->work) ->&q->requeue_lock ->&hctx->lock ->&__ctx->lock FD: 15 BD: 3 +.+.: (work_completion)(&(&hctx->run_work)->work) FD: 30 BD: 1 ..-.: &(&ops->cursor_work)->timer FD: 32 BD: 2 +.+.: (work_completion)(&(&ops->cursor_work)->work) ->(console_sem).lock ->&obj_hash[i].lock ->&base->lock FD: 265 BD: 1 +.+.: zram_index_mutex ->fs_reclaim ->pool_lock#2 ->blk_queue_ida.xa_lock ->&obj_hash[i].lock ->pcpu_alloc_mutex ->bio_slab_lock ->&c->lock ->&____s->seqcount ->percpu_counters_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#3 ->&s->s_inode_list_lock ->&xa->xa_lock#10 ->lock ->&q->queue_lock ->&x->wait#9 ->&bdev->bd_size_lock ->&k->list_lock ->gdp_mutex ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock 
->&x->wait#11 ->&rq->__lock ->subsys mutex#36 ->dev_hotplug_mutex ->&q->sysfs_dir_lock ->percpu_ref_switch_lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#37 ->cgwb_lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->bdi_lock ->inode_hash_lock ->(console_sem).lock FD: 3 BD: 1 +.+.: subsys mutex#38 ->&k->k_lock FD: 50 BD: 2 +.+.: &default_group_class[depth - 1]#3 ->fs_reclaim ->pool_lock#2 ->configfs_dirent_lock FD: 2 BD: 1 +.+.: &lock ->nullb_indexes.xa_lock FD: 1 BD: 2 ....: nullb_indexes.xa_lock FD: 7 BD: 2 ....: nfc_index_ida.xa_lock ->&c->lock ->pool_lock#2 FD: 138 BD: 4 +.+.: nfc_devlist_mutex ->fs_reclaim ->pool_lock#2 ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock ->subsys mutex#39 ->&k->k_lock ->&genl_data->genl_data_mutex ->dev_pm_qos_sysfs_mtx ->kernfs_idr_lock ->&x->wait#9 ->deferred_probe_mutex ->device_links_lock ->mmu_notifier_invalidate_range_start ->&____s->seqcount#2 ->quarantine_lock ->&sem->wait_lock ->&p->pi_lock ->&n->list_lock ->&rq->__lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->remove_cache_srcu ->nfc_devlist_mutex.wait_lock ->kn->active#4 ->&cfs_rq->removed.lock ->dev_pm_qos_sysfs_mtx.wait_lock ->uevent_sock_mutex.wait_lock ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 27 BD: 5 +.+.: subsys mutex#39 ->&k->k_lock ->&rq->__lock FD: 1 BD: 2 +.+.: llcp_devices_lock FD: 1 BD: 55 ....: &rfkill->lock FD: 27 BD: 14 +.+.: subsys mutex#40 ->&k->k_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 122 BD: 6 +.+.: (work_completion)(&rfkill->sync_work) ->rfkill_global_mutex ->rfkill_global_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 14 +.+.: rfkill_global_mutex.wait_lock FD: 2 BD: 1 +.+.: dma_heap_minors.xa_lock ->pool_lock#2 FD: 3 BD: 1 +.+.: subsys mutex#41 ->&k->k_lock FD: 1 BD: 1 +.+.: heap_list_lock FD: 4 BD: 1 +.+.: subsys mutex#42 ->&k->list_lock ->&k->k_lock FD: 1 BD: 1 +.+.: nvmf_hosts_mutex FD: 3 BD: 1 +.+.: subsys mutex#43 ->&k->k_lock FD: 1 BD: 1 +.+.: nvmf_transports_rwsem FD: 3 BD: 1 +.+.: subsys mutex#44 ->&k->k_lock FD: 67 BD: 3 +.+.: &default_group_class[depth - 1]#4/2 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->configfs_dirent_lock ->&c->lock ->&____s->seqcount ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&default_group_class[depth - 1]#5/2 FD: 1 BD: 1 +.+.: nvmet_config_sem FD: 3 BD: 1 +.+.: subsys mutex#45 ->&k->k_lock FD: 1 BD: 3 ....: nvme_instance_ida.xa_lock FD: 3 BD: 3 +.+.: subsys mutex#46 ->&k->k_lock FD: 94 BD: 3 +.+.: pools_reg_lock ->pools_lock ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem FD: 1 BD: 4 +.+.: pools_lock FD: 66 BD: 4 +.+.: &default_group_class[depth - 1]#5/2 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->configfs_dirent_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&default_group_class[depth - 1]#6/2 FD: 65 BD: 5 +.+.: &default_group_class[depth - 1]#6/2 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->configfs_dirent_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#18 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&default_group_class[depth - 1]#7 
->&default_group_class[depth - 1]#7/2 FD: 50 BD: 6 +.+.: &default_group_class[depth - 1]#7 ->fs_reclaim ->&____s->seqcount ->pool_lock#2 ->configfs_dirent_lock FD: 1 BD: 6 +.+.: &default_group_class[depth - 1]#7/2 FD: 1 BD: 1 +.+.: backend_mutex FD: 1 BD: 1 +.+.: scsi_mib_index_lock FD: 1 BD: 1 +.+.: hba_lock FD: 49 BD: 1 +.+.: device_mutex ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: &hba->device_lock FD: 2 BD: 3 ....: &ctrl->lock ->&ctrl->state_wq FD: 1 BD: 4 ....: &ctrl->state_wq FD: 1 BD: 9 +.+.: &hctx->lock FD: 1 BD: 125 +.+.: &nvmeq->sq_lock FD: 26 BD: 6 ..-.: &x->wait#16 ->&p->pi_lock FD: 119 BD: 6 +.+.: nvme_subsystems_lock ->fs_reclaim ->pool_lock#2 ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock ->&rq->__lock ->&cfs_rq->removed.lock ->subsys mutex#47 FD: 3 BD: 7 +.+.: subsys mutex#47 ->&k->k_lock FD: 1 BD: 7 +.+.: &xa->xa_lock#12 FD: 1 BD: 1 +.+.: part_parser_lock FD: 281 BD: 1 +.+.: mtd_table_mutex ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&rq->__lock ->&x->wait#11 ->batched_entropy_u8.lock ->kfence_freelist_lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#48 ->devtree_lock ->nvmem_ida.xa_lock ->nvmem_cell_mutex ->&k->k_lock ->subsys mutex#49 ->nvmem_mutex ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->(console_sem).lock ->rcu_node_0 ->&rcu_state.expedited_wq ->pcpu_alloc_mutex ->batched_entropy_u32.lock ->mmu_notifier_invalidate_range_start ->blk_queue_ida.xa_lock ->&q->sysfs_lock ->&set->tag_list_lock ->bio_slab_lock ->percpu_counters_lock ->&sb->s_type->i_lock_key#3 ->&s->s_inode_list_lock ->&xa->xa_lock#10 ->&q->mq_freeze_lock ->set->srcu ->percpu_ref_switch_lock ->&q->queue_lock ->&bdev->bd_size_lock ->elv_list_lock ->(work_completion)(&(&q->requeue_work)->work) ->(work_completion)(&(&hctx->run_work)->work) ->&q->debugfs_mutex ->subsys mutex#36 ->dev_hotplug_mutex ->&q->sysfs_dir_lock ->subsys mutex#37 ->cgwb_lock ->bdi_lock ->inode_hash_lock FD: 1 BD: 1 +.+.: chip_drvs_lock FD: 126 BD: 1 +.+.: &dev->shutdown_lock ->&md->mutex ->&desc->request_mutex ->&obj_hash[i].lock ->pool_lock#2 ->pci_lock ->&rq->__lock ->fs_reclaim ->free_vmap_area_lock ->vmap_area_lock ->register_lock ->&irq_desc_lock_class ->proc_subdir_lock ->proc_inum_ida.xa_lock FD: 1 BD: 108 ....: irq_resend_lock FD: 1 BD: 108 +.+.: &ent->pde_unload_lock FD: 1 BD: 5 ....: (kmod_concurrent_max).lock FD: 26 BD: 7 ....: &x->wait#17 ->&p->pi_lock FD: 1 BD: 3 ....: &prev->lock FD: 30 BD: 2 +.+.: subsys mutex#48 ->rcu_node_0 ->&rcu_state.expedited_wq ->&rq->__lock ->&k->k_lock FD: 1 BD: 2 ....: nvmem_ida.xa_lock FD: 1 BD: 2 +.+.: nvmem_cell_mutex FD: 1 BD: 2 +.+.: subsys mutex#49 FD: 1 BD: 2 +.+.: nvmem_mutex FD: 25 BD: 6 ++++: &ctrl->namespaces_rwsem ->&rq->__lock FD: 295 BD: 3 +.+.: (wq_completion)nvme-wq ->(work_completion)(&ctrl->async_event_work) ->(work_completion)(&ctrl->scan_work) ->(work_completion)(&barr->work) FD: 293 BD: 4 +.+.: (work_completion)(&ctrl->scan_work) ->&ctrl->scan_lock FD: 2 BD: 4 +.+.: (work_completion)(&ctrl->async_event_work) ->&nvmeq->sq_lock FD: 292 BD: 5 +.+.: &ctrl->scan_lock ->fs_reclaim ->pool_lock#2 ->&hctx->lock ->&x->wait#16 ->&obj_hash[i].lock ->&base->lock ->&rq->__lock 
->(&timer.timer) ->&ctrl->namespaces_rwsem ->blk_queue_ida.xa_lock ->pcpu_alloc_mutex ->&q->sysfs_lock ->&set->tag_list_lock ->bio_slab_lock ->percpu_counters_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#3 ->&s->s_inode_list_lock ->&xa->xa_lock#10 ->lock ->&q->mq_freeze_lock ->percpu_ref_switch_lock ->&q->queue_lock ->&x->wait#9 ->nvme_subsystems_lock ->&subsys->lock ->&c->lock ->&____s->seqcount ->&cfs_rq->removed.lock ->&bdev->bd_size_lock ->ext_devt_ida.xa_lock ->&k->list_lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&k->k_lock ->subsys mutex#36 ->dev_hotplug_mutex ->&q->sysfs_dir_lock ->gdp_mutex ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#37 ->cgwb_lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->bdi_lock ->inode_hash_lock ->bdev_lock ->&disk->open_mutex ->nvme_ns_chr_minor_ida.xa_lock ->chrdevs_lock ->subsys mutex#50 ->&dentry->d_lock ->quarantine_lock FD: 59 BD: 6 +.+.: &subsys->lock ->fs_reclaim ->pool_lock#2 ->&xa->xa_lock#12 ->&obj_hash[i].lock ->pcpu_alloc_mutex FD: 1 BD: 10 ....: ext_devt_ida.xa_lock FD: 26 BD: 529 -.-.: &folio_wait_table[i] ->&p->pi_lock FD: 1 BD: 47 +.+.: &bond->stats_lock FD: 33 BD: 61 ....: lweventlist_lock ->pool_lock#2 ->&dir->lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->&base->lock ->&____s->seqcount#2 ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 34 BD: 1 +.+.: (wq_completion)gid-cache-wq ->(work_completion)(&ndev_work->work) ->(work_completion)(&work->work) ->&rq->__lock FD: 32 BD: 2 +.+.: (work_completion)(&ndev_work->work) ->devices_rwsem ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock ->&meta->lock ->kfence_freelist_lock ->rcu_node_0 ->quarantine_lock ->&base->lock FD: 702 BD: 2 +.+.: (linkwatch_work).work ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 61 +.+.: rtnl_mutex.wait_lock FD: 3 BD: 48 ..-.: once_lock ->crngs.lock FD: 226 BD: 2 +.+.: (work_completion)(&w->work) ->cpu_hotplug_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 25 BD: 47 ++++: (inet6addr_validator_chain).rwsem ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 47 ++++: (inetaddr_validator_chain).rwsem ->&rq->__lock FD: 1 BD: 6 ....: nvme_ns_chr_minor_ida.xa_lock FD: 1 BD: 535 ....: &sem->wait_lock FD: 3 BD: 6 +.+.: subsys mutex#50 ->&k->k_lock FD: 3 BD: 1 +.+.: subsys mutex#51 ->&k->k_lock FD: 1 BD: 1 +.+.: gpio_lookup_lock FD: 1 BD: 1 +.+.: mdio_board_lock FD: 1 BD: 1 +.+.: mode_list_lock FD: 1 BD: 1 +.+.: l3mdev_lock FD: 1 BD: 114 -.-.: &retval->lock FD: 44 BD: 1 +.+.: (wq_completion)gve ->(work_completion)(&priv->service_task) FD: 43 BD: 2 +.+.: (work_completion)(&priv->service_task) ->(console_sem).lock ->lweventlist_lock ->&rq->__lock ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->&dir->lock#2 FD: 1 BD: 1 +.+.: hnae3_common_lock FD: 3 BD: 1 +.+.: subsys mutex#52 ->&k->k_lock FD: 2 BD: 1 +.+.: compressor_list_lock ->pool_lock#2 FD: 1 BD: 5 ....: hwsim_netgroup_ida.xa_lock FD: 34 BD: 76 +.-.: hwsim_radio_lock ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&list->lock#16 ->&____s->seqcount#2 ->&zone->lock ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->init_task.mems_allowed_seq.seqcount FD: 3 BD: 8 +.+.: subsys mutex#53 ->&k->k_lock FD: 51 BD: 48 +.+.: param_lock ->rate_ctrl_mutex ->disk_events_mutex FD: 368 BD: 50 +.+.: &rdev->wiphy.mtx ->fs_reclaim ->pool_lock#2 ->&k->list_lock 
->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#54 ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->nl_table_lock ->nl_table_wait.lock ->reg_requests_lock ->stack_depot_init_mutex ->pcpu_alloc_mutex ->&xa->xa_lock#4 ->net_rwsem ->&x->wait#9 ->subsys mutex#20 ->&dir->lock#2 ->dev_hotplug_mutex ->dev_base_lock ->input_pool.lock ->batched_entropy_u32.lock ->&tbl->lock ->sysctl_lock ->&fq->lock ->&local->iflist_mtx ->rlock-AF_NETLINK ->&rdev->bss_lock ->&rq->__lock ->lweventlist_lock ->&base->lock ->&data->mutex ->&rdev->wiphy_work_lock ->&local->filter_lock ->quarantine_lock ->&tn->lock ->&sem->wait_lock ->&p->pi_lock ->failover_lock ->proc_subdir_lock ->proc_inum_ida.xa_lock ->&idev->mc_lock ->&pnettable->lock ->smc_ib_devices.mutex ->&n->list_lock ->&ndev->lock ->&wdev->event_lock ->&rdev->mgmt_registrations_lock ->(&dwork->timer) ->&dentry->d_lock ->&fsnotify_mark_srcu ->&sb->s_type->i_lock_key#7 ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->mount_lock ->(&dwork->timer)#2 ->(work_completion)(&(&link->color_collision_detect_work)->work) ->rtnl_mutex.wait_lock ->&list->lock#15 ->&ifibss->incomplete_lock ->(console_sem).lock ->console_owner_lock ->console_owner ->tk_core.seq.seqcount ->hrtimer_bases.lock ->&list->lock#2 ->pool_lock ->remove_cache_srcu ->&sta->lock ->lock#6 ->&____s->seqcount#2 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&cfs_rq->removed.lock ->&lock->wait_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&local->active_txq_lock[i] ->(work_completion)(&sta->drv_deliver_wk) ->&rcu_state.gp_wq ->&wdev->pmsr_lock ->&rnp->exp_wq[3] ->rcu_state.exp_mutex.wait_lock ->krc.lock ->&local->queue_stop_reason_lock ->&x->wait#3 ->(&ifibss->timer) ->_xmit_ETHER ->(&local->dynamic_ps_timer) ->(&dwork->timer)#3 ->&list->lock#16 ->&wq->mutex ->cpu_hotplug_lock ->bpf_devs_lock ->&in_dev->mc_tomb_lock ->class ->(&tbl->proxy_timer) ->&ul->lock ->&net->xdp.lock ->mirred_list_lock ->&nft_net->commit_mutex ->&ent->pde_unload_lock ->&net->ipv6.addrconf_hash_lock ->&idev->mc_query_lock ->(work_completion)(&(&idev->mc_report_work)->work) ->&idev->mc_report_lock ->&pnn->pndevs.lock ->&pnn->routes.lock ->target_list_lock ->kernfs_idr_lock ->dev_pm_qos_sysfs_mtx ->deferred_probe_mutex ->device_links_lock ->(&dwork->timer)#4 ->mmu_notifier_invalidate_range_start ->&ul->lock#2 ->key ->pcpu_lock ->percpu_counters_lock ->&rnp->exp_lock ->stock_lock ->(wq_completion)phy164 ->(wq_completion)phy163 ->(wq_completion)phy162 ->(wq_completion)phy161 ->(wq_completion)phy136 ->(wq_completion)phy135 FD: 3 BD: 51 +.+.: subsys mutex#54 ->&k->k_lock FD: 1 BD: 51 +.+.: reg_requests_lock FD: 7 BD: 75 +.-.: &fq->lock ->tk_core.seq.seqcount ->&obj_hash[i].lock ->&zone->lock ->pool_lock#2 FD: 1 BD: 51 +.+.: &local->iflist_mtx FD: 3 BD: 47 +.+.: subsys mutex#55 ->&k->k_lock FD: 2 BD: 48 +.+.: &sdata->sec_mtx ->&sec->lock FD: 1 BD: 49 +...: &sec->lock FD: 25 BD: 47 +.+.: &local->iflist_mtx#2 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 49 BD: 1 +.+.: hwsim_phys_lock ->fs_reclaim ->pool_lock#2 ->&____s->seqcount FD: 49 BD: 1 +.+.: xdomain_lock ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: ioctl_mutex FD: 1 BD: 1 +.+.: address_handler_list_lock FD: 1 BD: 1 +.+.: card_mutex FD: 3 BD: 1 +.+.: subsys mutex#56 ->&k->k_lock FD: 26 BD: 1 ....: &x->wait#18 ->&p->pi_lock FD: 28 BD: 2 ..-.: &txlock 
->&list->lock#3 ->&txwq FD: 1 BD: 3 ..-.: &list->lock#3 FD: 26 BD: 3 ..-.: &txwq ->&p->pi_lock FD: 2 BD: 1 ....: &iocq[i].lock ->&ktiowq[i] FD: 1 BD: 2 ....: &ktiowq[i] FD: 1 BD: 1 ....: rcu_read_lock_bh FD: 1 BD: 59 +.-.: noop_qdisc.q.lock FD: 3 BD: 3 +.+.: subsys mutex#57 ->&k->k_lock FD: 176 BD: 1 +.+.: usb_bus_idr_lock ->(usb_notifier_list).rwsem ->fs_reclaim ->pool_lock#2 ->mmu_notifier_invalidate_range_start ->hcd_root_hub_lock ->&obj_hash[i].lock ->&rq->__lock ->&x->wait#19 ->&dev->power.lock ->device_links_srcu ->&____s->seqcount ->&c->lock ->(console_sem).lock ->input_pool.lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&k->k_lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&(&priv->bus_notifier)->rwsem ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#58 ->&x->wait#9 ->&vhci_hcd->vhci->lock ->&lock->wait_lock ->quarantine_lock ->&base->lock ->(&timer.timer) ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 118 BD: 1 +.+.: table_lock ->&k->list_lock ->fs_reclaim ->pool_lock#2 ->lock ->&root->kernfs_rwsem ->&k->k_lock ->uevent_sock_mutex ->&obj_hash[i].lock ->running_helpers_waitq.lock ->(console_sem).lock ->&c->lock ->&____s->seqcount ->&rq->__lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&base->lock FD: 2 BD: 1 +.-.: (&ipmi_timer) ->&ipmi_interfaces_srcu FD: 1 BD: 3 +.+.: mon_lock FD: 113 BD: 2 +.+.: usb_port_peer_mutex ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->&k->k_lock ->dev_pm_qos_mtx ->component_mutex ->device_links_srcu ->dev_pm_qos_sysfs_mtx ->&rq->__lock ->sysfs_symlink_target_lock FD: 1 BD: 9 ....: device_state_lock FD: 29 BD: 8 ....: hcd_root_hub_lock ->hcd_urb_list_lock ->&bh->lock ->&p->pi_lock ->device_state_lock FD: 1 BD: 9 ....: hcd_urb_list_lock FD: 1 BD: 9 ..-.: &bh->lock FD: 12 BD: 73 ..-.: lock#6 ->kcov_remote_lock ->&kcov->lock FD: 10 BD: 124 ..-.: kcov_remote_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&n->list_lock FD: 26 BD: 8 ..-.: &x->wait#19 ->&p->pi_lock FD: 1 BD: 2 +.+.: set_config_lock FD: 64 BD: 2 +.+.: hcd->bandwidth_mutex ->devtree_lock ->&obj_hash[i].lock ->&x->wait#9 ->&dev->power.lock ->fs_reclaim ->pool_lock#2 ->mmu_notifier_invalidate_range_start ->hcd_root_hub_lock ->&rq->__lock ->&x->wait#19 ->&base->lock ->(&timer.timer) ->&c->lock ->&____s->seqcount FD: 1 BD: 2 +.+.: &new_driver->dynids.lock FD: 1 BD: 7 ....: &dum_hcd->dum->lock FD: 56 BD: 4 +.+.: &hub->status_mutex ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->hcd_root_hub_lock ->fs_reclaim ->&dum_hcd->dum->lock ->&obj_hash[i].lock ->&rq->__lock ->&x->wait#19 ->&c->lock ->&____s->seqcount ->&base->lock ->(&timer.timer) ->&vhci_hcd->vhci->lock ->&cfs_rq->removed.lock ->&n->list_lock ->remove_cache_srcu ->quarantine_lock FD: 1 BD: 3 +.+.: component_mutex FD: 62 BD: 2 +.+.: (work_completion)(&(&hub->init_work)->work) ->&rq->__lock ->&lock->wait_lock ->&p->pi_lock FD: 1 BD: 2 +.+.: subsys mutex#58 FD: 38 BD: 1 +.+.: (wq_completion)usb_hub_wq ->(work_completion)(&hub->events) FD: 37 BD: 2 +.+.: (work_completion)(&hub->events) ->lock#6 ->&dev->power.lock ->&lock->wait_lock ->&p->pi_lock FD: 1 BD: 3 ....: &hub->irq_urb_lock FD: 1 BD: 3 ....: (&hub->irq_urb_retry) FD: 1 BD: 3 ....: hcd_urb_unlink_lock FD: 26 BD: 3 ..-.: usb_kill_urb_queue.lock ->&p->pi_lock FD: 1 BD: 3 +.+.: (work_completion)(&hub->tt.clear_work) FD: 1 BD: 12 +.+.: udc_lock FD: 3 BD: 1 
+.+.: subsys mutex#59 ->&k->k_lock FD: 1 BD: 1 ....: gadget_id_numbers.xa_lock FD: 88 BD: 2 +.+.: (work_completion)(&gadget->work) ->&root->kernfs_rwsem ->kernfs_notify_lock FD: 30 BD: 108 ....: kernfs_notify_lock FD: 60 BD: 2 +.+.: kernfs_notify_work ->kernfs_notify_lock ->&root->kernfs_supers_rwsem FD: 54 BD: 7 ++++: &root->kernfs_supers_rwsem ->inode_hash_lock FD: 1 BD: 1 +.+.: subsys mutex#60 FD: 1 BD: 1 +.+.: func_lock FD: 1 BD: 1 +.+.: g_tf_lock FD: 1 BD: 7 ....: &vhci_hcd->vhci->lock FD: 2 BD: 2 ....: input_ida.xa_lock ->pool_lock#2 FD: 1 BD: 1 ....: &mousedev->mutex/1 FD: 30 BD: 4 ....: serio_event_lock ->pool_lock#2 FD: 58 BD: 1 +.+.: (wq_completion)events_long ->serio_event_work ->(work_completion)(&(&ipvs->defense_work)->work) ->(work_completion)(&(&br->gc_work)->work) ->(work_completion)(&br->mcast_gc_work) FD: 35 BD: 2 +.+.: serio_event_work ->serio_mutex FD: 34 BD: 3 +.+.: serio_mutex ->serio_event_lock ->&k->list_lock ->&k->k_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 1 ....: rtc_ida.xa_lock FD: 32 BD: 1 +.+.: &rtc->ops_lock ->(efi_runtime_lock).lock ->&obj_hash[i].lock ->&rq->__lock ->&x->wait#12 FD: 1 BD: 2 ....: platform_devid_ida.xa_lock FD: 1 BD: 2 ....: rtcdev_lock FD: 49 BD: 1 +.+.: g_smscore_deviceslock ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: cx231xx_devlist_mutex FD: 1 BD: 1 +.+.: em28xx_devlist_mutex FD: 1 BD: 1 ....: pvr2_context_sync_data.lock FD: 1 BD: 8 +.+.: i2c_dev_list_lock FD: 3 BD: 8 +.+.: subsys mutex#61 ->&k->k_lock FD: 1 BD: 1 +.+.: subsys mutex#62 FD: 211 BD: 2 +.+.: dvbdev_register_lock ->(console_sem).lock ->fs_reclaim ->pool_lock#2 ->minor_rwsem ->&xa->xa_lock#13 ->&mdev->graph_mutex ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->&k->k_lock ->subsys mutex#63 FD: 212 BD: 1 +.+.: frontend_mutex ->fs_reclaim ->pool_lock#2 ->(console_sem).lock ->dvbdev_register_lock FD: 1 BD: 3 +.+.: minor_rwsem FD: 2 BD: 3 ....: &xa->xa_lock#13 ->pool_lock#2 FD: 165 BD: 4 +.+.: &mdev->graph_mutex ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&mm->mmap_lock ->&rq->__lock FD: 3 BD: 3 +.+.: subsys mutex#63 ->&k->k_lock FD: 1 BD: 1 ....: &dmxdev->lock FD: 1 BD: 1 +.+.: &dvbdemux->mutex FD: 25 BD: 1 +.+.: media_devnode_lock ->&rq->__lock FD: 1 BD: 1 +.+.: subsys mutex#64 FD: 25 BD: 1 +.+.: videodev_lock ->&rq->__lock FD: 3 BD: 1 +.+.: subsys mutex#65 ->&k->k_lock FD: 1 BD: 1 +.+.: vimc_sensor:396:(&vsensor->hdl)->_lock FD: 1 BD: 1 +.+.: &v4l2_dev->lock FD: 1 BD: 1 +.+.: vimc_debayer:581:(&vdebayer->hdl)->_lock FD: 1 BD: 1 +.+.: vimc_lens:61:(&vlens->hdl)->_lock FD: 59 BD: 1 +.+.: vivid_ctrls:1606:(hdl_user_gen)->_lock ->vivid_ctrls:1620:(hdl_vid_cap)->_lock ->fs_reclaim ->pool_lock#2 ->vivid_ctrls:1622:(hdl_vid_out)->_lock ->&c->lock ->&____s->seqcount ->vivid_ctrls:1625:(hdl_vbi_cap)->_lock ->vivid_ctrls:1627:(hdl_vbi_out)->_lock ->vivid_ctrls:1630:(hdl_radio_rx)->_lock ->vivid_ctrls:1632:(hdl_radio_tx)->_lock ->vivid_ctrls:1634:(hdl_sdr_cap)->_lock ->vivid_ctrls:1636:(hdl_meta_cap)->_lock ->vivid_ctrls:1638:(hdl_meta_out)->_lock ->vivid_ctrls:1640:(hdl_tch_cap)->_lock ->&zone->lock ->&obj_hash[i].lock FD: 50 BD: 1 +.+.: vivid_ctrls:1608:(hdl_user_vid)->_lock ->vivid_ctrls:1620:(hdl_vid_cap)->_lock ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount FD: 53 BD: 1 +.+.: 
vivid_ctrls:1610:(hdl_user_aud)->_lock ->vivid_ctrls:1620:(hdl_vid_cap)->_lock ->fs_reclaim ->pool_lock#2 ->vivid_ctrls:1622:(hdl_vid_out)->_lock ->vivid_ctrls:1630:(hdl_radio_rx)->_lock ->vivid_ctrls:1632:(hdl_radio_tx)->_lock FD: 57 BD: 1 +.+.: vivid_ctrls:1612:(hdl_streaming)->_lock ->vivid_ctrls:1620:(hdl_vid_cap)->_lock ->fs_reclaim ->pool_lock#2 ->vivid_ctrls:1622:(hdl_vid_out)->_lock ->vivid_ctrls:1625:(hdl_vbi_cap)->_lock ->vivid_ctrls:1627:(hdl_vbi_out)->_lock ->vivid_ctrls:1634:(hdl_sdr_cap)->_lock ->vivid_ctrls:1636:(hdl_meta_cap)->_lock ->vivid_ctrls:1638:(hdl_meta_out)->_lock ->vivid_ctrls:1640:(hdl_tch_cap)->_lock ->&c->lock ->&____s->seqcount FD: 51 BD: 1 +.+.: vivid_ctrls:1614:(hdl_sdtv_cap)->_lock ->vivid_ctrls:1620:(hdl_vid_cap)->_lock ->fs_reclaim ->pool_lock#2 ->vivid_ctrls:1625:(hdl_vbi_cap)->_lock FD: 51 BD: 1 +.+.: vivid_ctrls:1616:(hdl_loop_cap)->_lock ->vivid_ctrls:1620:(hdl_vid_cap)->_lock ->fs_reclaim ->pool_lock#2 ->vivid_ctrls:1625:(hdl_vbi_cap)->_lock FD: 1 BD: 1 +.+.: vivid_ctrls:1618:(hdl_fb)->_lock FD: 1 BD: 7 +.+.: vivid_ctrls:1620:(hdl_vid_cap)->_lock FD: 1 BD: 4 +.+.: vivid_ctrls:1622:(hdl_vid_out)->_lock FD: 1 BD: 5 +.+.: vivid_ctrls:1625:(hdl_vbi_cap)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1627:(hdl_vbi_out)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1630:(hdl_radio_rx)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1632:(hdl_radio_tx)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1634:(hdl_sdr_cap)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1636:(hdl_meta_cap)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1638:(hdl_meta_out)->_lock FD: 1 BD: 3 +.+.: vivid_ctrls:1640:(hdl_tch_cap)->_lock FD: 1 BD: 1 ....: &adap->kthread_waitq FD: 1 BD: 1 +.+.: &dev->cec_xfers_slock FD: 1 BD: 1 ....: &dev->kthread_waitq_cec FD: 1 BD: 1 +.+.: cec_devnode_lock FD: 1 BD: 1 +.+.: subsys mutex#66 FD: 5 BD: 1 +.+.: &adap->lock ->tk_core.seq.seqcount ->&adap->devnode.lock_fhs FD: 1 BD: 2 +.+.: &adap->devnode.lock_fhs FD: 1 BD: 1 ....: ptp_clocks_map.xa_lock FD: 3 BD: 1 +.+.: subsys mutex#67 ->&k->k_lock FD: 1 BD: 1 +.+.: pers_lock FD: 1 BD: 1 +.+.: _lock FD: 1 BD: 3 +.+.: dm_bufio_clients_lock FD: 1 BD: 1 +.+.: _ps_lock FD: 1 BD: 1 +.+.: _lock#2 FD: 1 BD: 1 +.+.: _lock#3 FD: 1 BD: 1 +.+.: register_lock#2 FD: 3 BD: 1 +.+.: subsys mutex#68 ->&k->k_lock FD: 1 BD: 1 .+.+: bp_lock FD: 3 BD: 1 +.+.: subsys mutex#69 ->&k->k_lock FD: 15 BD: 1 +.-.: (&dsp_spl_tl) ->dsp_lock FD: 14 BD: 2 ..-.: dsp_lock ->iclock_lock ->&obj_hash[i].lock ->&base->lock FD: 4 BD: 3 ...-: iclock_lock ->tk_core.seq.seqcount FD: 50 BD: 47 +.+.: lock#7 ->fs_reclaim ->pool_lock#2 ->&xa->xa_lock#15 FD: 1 BD: 1 ....: iscsi_transport_lock FD: 3 BD: 1 +.+.: subsys mutex#70 ->&k->k_lock FD: 1 BD: 1 +.+.: link_ops_rwsem FD: 1 BD: 1 ....: &tx_task->waiting FD: 129 BD: 1 +.+.: disable_lock ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&k->k_lock ->&dev->power.lock ->dpm_list_mtx ->&(&priv->bus_notifier)->rwsem ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#4 FD: 3 BD: 1 +.+.: protocol_lock ->&____s->seqcount ->pool_lock#2 FD: 72 BD: 1 +.+.: psinfo_lock ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->init_mm.page_table_lock ->(console_sem).lock ->&rq->__lock ->pstore_sb_lock ->dump_list_lock FD: 58 BD: 3 +.+.: pstore_sb_lock ->&sb->s_type->i_mutex_key#12 FD: 1 BD: 2 ....: dump_list_lock FD: 1 BD: 1 +.+.: vsock_register_mutex FD: 1 BD: 1 +.+.: comedi_drivers_list_lock FD: 104 
BD: 1 +.+.: cscfg_mutex ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->&dev->power.lock ->dpm_list_mtx FD: 1 BD: 536 +.+.: icc_bw_lock FD: 3 BD: 6 +.+.: subsys mutex#71 ->&k->k_lock FD: 108 BD: 2 ++++: snd_ctl_layer_rwsem ->snd_ctl_led_mutex ->fs_reclaim ->pool_lock#2 ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->&k->k_lock ->sysfs_symlink_target_lock FD: 1 BD: 3 +.+.: snd_card_mutex FD: 1 BD: 1 +.+.: snd_ioctl_rwsem FD: 49 BD: 2 +.+.: strings ->fs_reclaim ->pool_lock#2 ->&c->lock ->&____s->seqcount FD: 1 BD: 2 +.+.: register_mutex FD: 121 BD: 3 +.+.: sound_mutex ->fs_reclaim ->pool_lock#2 ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&rq->__lock ->&x->wait#11 ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#71 ->&cfs_rq->removed.lock ->&k->k_lock FD: 131 BD: 1 +.+.: register_mutex#2 ->fs_reclaim ->pool_lock#2 ->sound_mutex ->&obj_hash[i].lock ->register_mutex ->&c->lock ->&____s->seqcount ->sound_oss_mutex ->strings ->&entry->access ->info_mutex FD: 123 BD: 1 +.+.: register_mutex#3 ->fs_reclaim ->pool_lock#2 ->sound_mutex ->clients_lock FD: 1 BD: 5 ....: clients_lock FD: 2 BD: 1 +.+.: &client->ports_mutex ->&client->ports_lock FD: 1 BD: 5 .+.+: &client->ports_lock FD: 124 BD: 1 +.+.: register_mutex#4 ->fs_reclaim ->pool_lock#2 ->sound_oss_mutex FD: 123 BD: 3 +.+.: sound_oss_mutex ->fs_reclaim ->pool_lock#2 ->sound_loader_lock ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&____s->seqcount ->&dev->power.lock ->dpm_list_mtx ->req_lock ->&p->pi_lock ->&x->wait#11 ->&rq->__lock ->uevent_sock_mutex ->running_helpers_waitq.lock ->subsys mutex#71 ->&k->k_lock ->&cfs_rq->removed.lock FD: 1 BD: 4 +.+.: sound_loader_lock FD: 52 BD: 1 .+.+: &grp->list_mutex/1 ->clients_lock ->&client->ports_lock ->register_lock#3 ->fs_reclaim ->&____s->seqcount ->pool_lock#2 ->&c->lock FD: 2 BD: 1 +.+.: &grp->list_mutex#2 ->&grp->list_lock FD: 1 BD: 2 ....: &grp->list_lock FD: 63 BD: 2 +.+.: async_lookup_work ->fs_reclaim ->pool_lock#2 ->clients_lock ->&client->ports_lock ->snd_card_mutex ->(kmod_concurrent_max).lock ->&obj_hash[i].lock ->&x->wait#17 ->&rq->__lock ->running_helpers_waitq.lock ->autoload_work ->&x->wait#10 FD: 1 BD: 2 ....: register_lock#3 FD: 4 BD: 3 +.+.: autoload_work ->&k->list_lock ->&k->k_lock FD: 111 BD: 1 ++++: &card->controls_rwsem ->&xa->xa_lock#14 ->fs_reclaim ->&card->ctl_files_rwlock ->snd_ctl_layer_rwsem ->pool_lock#2 ->&c->lock ->&____s->seqcount FD: 9 BD: 2 +.+.: &xa->xa_lock#14 ->pool_lock#2 ->&c->lock ->&____s->seqcount FD: 1 BD: 3 ....: &card->ctl_files_rwlock FD: 1 BD: 3 +.+.: snd_ctl_led_mutex FD: 1 BD: 1 +.+.: register_mutex#5 FD: 50 BD: 1 +.+.: client_mutex ->fs_reclaim ->pool_lock#2 ->&dev->devres_lock FD: 1 BD: 51 +.+.: failover_lock FD: 2 BD: 2 +...: llc_sap_list_lock ->pool_lock#2 FD: 49 BD: 1 +.+.: act_id_mutex ->fs_reclaim ->pool_lock#2 FD: 1 BD: 1 +.+.: act_mod_lock FD: 1 BD: 1 +.+.: ife_mod_lock FD: 1 BD: 1 +.+.: cls_mod_lock FD: 1 BD: 1 +.+.: ematch_mod_lock FD: 1 BD: 1 +.+.: sock_diag_table_mutex FD: 1 BD: 1 +.+.: nfnl_subsys_acct FD: 1 BD: 1 +.+.: nfnl_subsys_queue FD: 1 BD: 1 +.+.: nfnl_subsys_ulog FD: 1 BD: 
5 +.+.: nf_log_mutex FD: 1 BD: 1 +.+.: nfnl_subsys_osf FD: 33 BD: 1 +.+.: nf_sockopt_mutex ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->nf_sockopt_mutex.wait_lock ->key ->pcpu_lock ->percpu_counters_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock FD: 1 BD: 1 +.+.: nfnl_subsys_ctnetlink FD: 1 BD: 1 +.+.: nfnl_subsys_ctnetlink_exp FD: 1 BD: 5 +.+.: nf_ct_ecache_mutex FD: 1 BD: 1 +.+.: nfnl_subsys_cttimeout FD: 1 BD: 1 +.+.: nfnl_subsys_cthelper FD: 1 BD: 1 +.+.: nf_ct_helper_mutex FD: 1 BD: 1 +...: nf_conntrack_expect_lock FD: 1 BD: 1 +.+.: nf_ct_nat_helpers_mutex FD: 1 BD: 1 +.+.: nfnl_subsys_nftables FD: 1 BD: 1 +.+.: nfnl_subsys_nftcompat FD: 861 BD: 1 +.+.: masq_mutex ->pernet_ops_rwsem ->(inetaddr_chain).rwsem ->inet6addr_chain.lock FD: 167 BD: 5 +.+.: &xt[i].mutex ->fs_reclaim ->pool_lock#2 ->&____s->seqcount ->&mm->mmap_lock ->free_vmap_area_lock ->vmap_area_lock ->&per_cpu(xt_recseq, i) ->&obj_hash[i].lock ->purge_vmap_area_lock ->&c->lock ->&____s->seqcount#2 ->&n->list_lock ->init_mm.page_table_lock ->&rq->__lock ->rcu_node_0 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock ->&lock->wait_lock ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->quarantine_lock ->&rcu_state.expedited_wq ->pool_lock ->&base->lock FD: 25 BD: 67 +.+.: &tn->lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 3 BD: 1 +.+.: subsys mutex#72 ->&k->k_lock FD: 25 BD: 5 +.+.: nfnl_subsys_ipset ->&rq->__lock FD: 1 BD: 1 +.+.: ip_set_type_mutex FD: 58 BD: 5 +.+.: ipvs->est_mutex ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->&c->lock ->&____s->seqcount ->&n->list_lock ->pcpu_lock ->&obj_hash[i].lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->pcpu_alloc_mutex.wait_lock ->&p->pi_lock ->quarantine_lock ->&rq->__lock FD: 1 BD: 1 +.+.: ip_vs_sched_mutex FD: 49 BD: 5 +.+.: __ip_vs_app_mutex ->fs_reclaim ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->&n->list_lock ->&rq->__lock FD: 1 BD: 1 +.+.: ip_vs_pe_mutex FD: 1 BD: 1 +.+.: tunnel4_mutex FD: 1 BD: 1 +.+.: xfrm4_protocol_mutex FD: 1 BD: 1 +.+.: inet_diag_table_mutex FD: 1 BD: 1 +...: xfrm_km_lock FD: 1 BD: 1 +.+.: xfrm6_protocol_mutex FD: 1 BD: 1 +.+.: tunnel6_mutex FD: 1 BD: 1 +.+.: xfrm_if_cb_lock FD: 1 BD: 1 +...: inetsw6_lock FD: 1 BD: 6 +.+.: &hashinfo->lock#2 FD: 17 BD: 5 +.+.: &net->ipv6.ip6addrlbl_table.lock ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock FD: 133 BD: 54 +.+.: &idev->mc_lock ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&dev_addr_list_lock_key ->&c->lock ->&____s->seqcount ->_xmit_ETHER ->batched_entropy_u32.lock ->&base->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->krc.lock ->&n->list_lock ->&bridge_netdev_addr_lock_key ->rcu_node_0 ->&rq->__lock ->&lock->wait_lock ->&dev_addr_list_lock_key#2 ->&batadv_netdev_addr_lock_key ->remove_cache_srcu ->&vlan_netdev_addr_lock_key ->&macvlan_netdev_addr_lock_key ->&dev_addr_list_lock_key#3 ->&bridge_netdev_addr_lock_key/1 ->&dev_addr_list_lock_key/1 ->pcpu_lock ->&dev_addr_list_lock_key#2/1 ->_xmit_ETHER/1 ->&batadv_netdev_addr_lock_key/1 ->&vlan_netdev_addr_lock_key/1 ->&macvlan_netdev_addr_lock_key/1 ->&dev_addr_list_lock_key#3/1 ->&macsec_netdev_addr_lock_key/1 ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->key ->percpu_counters_lock ->&rcu_state.expedited_wq FD: 8 BD: 55 +...: &dev_addr_list_lock_key ->pool_lock#2 ->&c->lock ->&n->list_lock FD: 42 BD: 67 +...: _xmit_ETHER ->&c->lock ->&____s->seqcount ->&local->filter_lock ->&rdev->wiphy_work_lock 
->pool_lock#2 ->(console_sem).lock ->console_owner_lock ->console_owner ->&____s->seqcount#2 ->&obj_hash[i].lock ->krc.lock ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 704 BD: 1 +.+.: (wq_completion)ipv6_addrconf ->(work_completion)(&(&net->ipv6.addr_chk_work)->work) ->(work_completion)(&(&ifa->dad_work)->work) ->&rq->__lock FD: 702 BD: 6 +.+.: (work_completion)(&(&net->ipv6.addr_chk_work)->work) ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock FD: 26 BD: 3 ....: &x->wait#20 ->&p->pi_lock FD: 47 BD: 81 ++--: &ndev->lock ->&ifa->lock ->pool_lock#2 ->&____s->seqcount ->&dir->lock#2 ->pcpu_lock ->&tb->tb6_lock ->&c->lock ->&obj_hash[i].lock ->&n->list_lock ->&____s->seqcount#2 ->batched_entropy_u32.lock ->&base->lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 3 BD: 1 +.+.: stp_proto_mutex ->llc_sap_list_lock FD: 1 BD: 1 ....: switchdev_notif_chain.lock FD: 25 BD: 47 ++++: (switchdev_blocking_notif_chain).rwsem ->&rq->__lock FD: 703 BD: 1 +.+.: br_ioctl_mutex ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->br_ioctl_mutex.wait_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 227 BD: 8 +.+.: nf_ct_proto_mutex ->defrag4_mutex ->nf_hook_mutex ->cpu_hotplug_lock ->&obj_hash[i].lock ->pool_lock#2 ->defrag6_mutex ->&rq->__lock FD: 166 BD: 5 +.+.: ebt_mutex ->fs_reclaim ->pool_lock#2 ->&mm->mmap_lock ->&c->lock ->&rq->__lock ->&n->list_lock ->ebt_mutex.wait_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: dsa_tag_drivers_lock FD: 1 BD: 1 +...: protocol_list_lock FD: 1 BD: 1 +...: linkfail_lock FD: 1 BD: 1 +...: rose_neigh_list_lock FD: 1 BD: 1 +.+.: proto_tab_lock#2 FD: 1 BD: 25 ++++: chan_list_lock FD: 1 BD: 2 +.+.: l2cap_sk_list.lock FD: 3 BD: 1 +.+.: sk_lock-AF_BLUETOOTH-BTPROTO_L2CAP ->slock-AF_BLUETOOTH-BTPROTO_L2CAP ->chan_list_lock FD: 1 BD: 2 +...: slock-AF_BLUETOOTH-BTPROTO_L2CAP FD: 1 BD: 1 ....: rfcomm_wq.lock FD: 1 BD: 1 +.+.: rfcomm_mutex FD: 1 BD: 1 +.+.: auth_domain_lock FD: 1 BD: 1 +.+.: registered_mechs_lock FD: 1 BD: 1 ....: atm_dev_notify_chain.lock FD: 1 BD: 1 +.+.: proto_tab_lock#3 FD: 702 BD: 1 +.+.: vlan_ioctl_mutex ->&mm->mmap_lock ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock FD: 1 BD: 1 +.+.: rds_info_lock FD: 39 BD: 1 +.+.: rds_trans_sem ->(console_sem).lock FD: 1 BD: 1 ....: &id_priv->lock FD: 2 BD: 48 +.+.: &xa->xa_lock#15 ->pool_lock#2 FD: 50 BD: 50 +.+.: k-sk_lock-AF_INET6 ->k-slock-AF_INET6 ->&tcp_hashinfo.bhash[i].lock ->&h->lhash2[i].lock ->&table->hash[i].lock ->k-clock-AF_INET6 ->&icsk->icsk_accept_queue.rskq_lock#2 ->&obj_hash[i].lock ->&rq->__lock FD: 29 BD: 52 +.-.: k-slock-AF_INET6 ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->&tcp_hashinfo.bhash[i].lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount ->crngs.lock ->quarantine_lock FD: 1 BD: 72 ++..: k-clock-AF_INET6 FD: 15 BD: 67 +.-.: &tcp_hashinfo.bhash[i].lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&tcp_hashinfo.bhash2[i].lock ->k-clock-AF_INET6 ->clock-AF_INET ->clock-AF_INET6 ->&obj_hash[i].lock FD: 14 BD: 68 +.-.: &tcp_hashinfo.bhash2[i].lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->k-clock-AF_INET6 ->clock-AF_INET ->clock-AF_INET6 ->&obj_hash[i].lock ->batched_entropy_u8.lock ->&hashinfo->ehash_locks[i] FD: 1 BD: 53 +.+.: &h->lhash2[i].lock FD: 1 BD: 5 +...: &list->lock#4 FD: 1 BD: 6 +...: k-clock-AF_TIPC FD: 34 BD: 5 +.+.: k-sk_lock-AF_TIPC ->k-slock-AF_TIPC ->&tn->nametbl_lock ->&rq->__lock ->&obj_hash[i].lock ->k-clock-AF_TIPC FD: 1 BD: 6 +...: k-slock-AF_TIPC FD: 19 BD: 6 
+...: &tn->nametbl_lock ->pool_lock#2 ->&service->lock ->&c->lock ->&nt->cluster_scope_lock ->&obj_hash[i].lock ->krc.lock ->&n->list_lock FD: 17 BD: 7 +...: &service->lock ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->krc.lock ->&n->list_lock FD: 25 BD: 51 +.+.: &pnettable->lock ->&rq->__lock FD: 25 BD: 51 +.+.: smc_ib_devices.mutex ->&rq->__lock FD: 1 BD: 1 +.+.: smc_wr_rx_hash_lock FD: 1 BD: 1 +.+.: v9fs_trans_lock FD: 1 BD: 5 +...: &this->receive_lock FD: 1 BD: 1 +...: lowpan_nhc_lock FD: 233 BD: 7 +.+.: ovs_mutex ->(work_completion)(&data->gc_work) ->nf_ct_proto_mutex ->&obj_hash[i].lock ->pool_lock#2 ->net_rwsem ->quarantine_lock FD: 225 BD: 9 +.+.: defrag4_mutex ->nf_hook_mutex ->cpu_hotplug_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 225 BD: 9 +.+.: defrag6_mutex ->nf_hook_mutex ->cpu_hotplug_lock ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock FD: 35 BD: 2 +.+.: drain_vmap_work ->vmap_purge_lock FD: 1 BD: 6 +.+.: ima_keys_lock FD: 52 BD: 90 +.+.: scomp_lock ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->init_mm.page_table_lock FD: 1 BD: 525 +.+.: &mm->page_table_lock FD: 32 BD: 525 +.+.: ptlock_ptr(ptdesc)#2 ->lock#4 ->key ->&____s->seqcount ->lock#5 ->&folio_wait_table[i] ->&obj_hash[i].lock FD: 260 BD: 5 +.+.: k-sk_lock-AF_RXRPC ->k-slock-AF_RXRPC ->&rxnet->local_mutex ->&local->services_lock ->fs_reclaim ->pool_lock#2 ->&c->lock ->&rx->incoming_lock ->&obj_hash[i].lock ->&rxnet->conn_lock ->&call->waitq ->(rxrpc_call_limiter).lock ->&rx->recvmsg_lock ->&rx->call_lock ->&rxnet->call_lock ->(&call->timer) ->&base->lock ->&list->lock#18 ->quarantine_lock ->&n->list_lock ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 6 +...: k-slock-AF_RXRPC FD: 247 BD: 6 +.+.: &rxnet->local_mutex ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->crngs.lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#8 ->&____s->seqcount ->&c->lock ->&dir->lock ->k-sk_lock-AF_INET6 ->k-slock-AF_INET6 ->cpu_hotplug_lock ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&x->wait#21 ->&n->list_lock ->&cfs_rq->removed.lock ->&____s->seqcount#2 ->key ->pcpu_lock ->percpu_counters_lock FD: 6 BD: 54 +...: &table->hash[i].lock ->k-clock-AF_INET6 ->&table->hash2[i].lock ->k-clock-AF_INET ->clock-AF_INET ->clock-AF_INET6 FD: 1 BD: 55 +...: &table->hash2[i].lock FD: 226 BD: 2 +.+.: netstamp_work ->cpu_hotplug_lock FD: 26 BD: 7 ....: &x->wait#21 ->&p->pi_lock FD: 1 BD: 6 +.+.: &local->services_lock FD: 1 BD: 8 +.+.: &rxnet->conn_lock FD: 1 BD: 6 ....: &call->waitq FD: 1 BD: 6 +.+.: &rx->call_lock FD: 1 BD: 6 +.+.: &rxnet->call_lock FD: 57 BD: 1 +.+.: init_user_ns.keyring_sem ->key_user_lock ->root_key_user.lock ->fs_reclaim ->pool_lock#2 ->crngs.lock ->key_serial_lock ->key_construction_mutex ->&type->lock_class ->keyring_serialise_link_lock FD: 1 BD: 5 +.+.: root_key_user.lock FD: 1 BD: 6 +.+.: keyring_name_lock FD: 1 BD: 1 +.+.: template_list FD: 1 BD: 1 +.+.: idr_lock FD: 30 BD: 5 +.-.: (&rxnet->peer_keepalive_timer) FD: 49 BD: 1 +.+.: ima_extend_list_mutex ->fs_reclaim ->pool_lock#2 FD: 17 BD: 5 +.+.: (wq_completion)krxrpcd ->(work_completion)(&rxnet->peer_keepalive_work) ->(work_completion)(&rxnet->service_conn_reaper) FD: 14 BD: 6 +.+.: (work_completion)(&rxnet->peer_keepalive_work) ->&rxnet->peer_hash_lock ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 7 +.+.: &rxnet->peer_hash_lock FD: 28 BD: 4 +.+.: deferred_probe_work ->deferred_probe_mutex FD: 25 BD: 47 +.+.: &(&net->nexthop.notifier_chain)->rwsem ->&rq->__lock FD: 40 BD: 48 +.+.: 
k-sk_lock-AF_INET ->k-slock-AF_INET#2 ->&table->hash[i].lock ->&obj_hash[i].lock ->k-clock-AF_INET ->&rq->__lock FD: 23 BD: 49 +...: k-slock-AF_INET#2 ->pool_lock#2 ->batched_entropy_u32.lock ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->&____s->seqcount#2 ->&n->list_lock FD: 1 BD: 55 ++..: k-clock-AF_INET FD: 1 BD: 1 ....: power_off_handler_list.lock FD: 702 BD: 2 +.+.: reg_work ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock FD: 1 BD: 47 +...: reg_pending_beacons_lock FD: 713 BD: 2 +.+.: (work_completion)(&fw_work->work) ->fs_reclaim ->pool_lock#2 ->&fw_cache.lock ->tk_core.seq.seqcount ->async_lock ->init_task.alloc_lock ->&dentry->d_lock ->&sb->s_type->i_mutex_key ->&obj_hash[i].lock ->(console_sem).lock ->console_owner_lock ->console_owner ->&rq->__lock ->umhelper_sem ->fw_lock ->rtnl_mutex FD: 2 BD: 3 +.+.: &fw_cache.lock ->pool_lock#2 FD: 2 BD: 389 +.+.: &____s->seqcount#6 ->&____s->seqcount#6/1 FD: 1 BD: 1 +.+.: detector_work FD: 1 BD: 1 +.+.: acpi_gpio_deferred_req_irqs_lock FD: 1 BD: 1 ....: enable_lock FD: 1 BD: 1 +.+.: gpd_list_lock FD: 3 BD: 4 +.+.: subsys mutex#73 ->&k->k_lock FD: 2 BD: 14 +.+.: fw_lock ->&x->wait#22 FD: 1 BD: 15 ....: &x->wait#22 FD: 1 BD: 5 +.+.: cdev_lock FD: 268 BD: 3 +.+.: &tty->legacy_mutex ->&tty->read_wait ->&tty->write_wait ->&tty->ldisc_sem ->&tty->files_lock ->&port->lock ->&port->mutex ->&port_lock_key ->&f->f_lock ->&obj_hash[i].lock ->pool_lock#2 ->tasklist_lock ->fs_reclaim ->stock_lock ->tty_ldiscs_lock ->&k->list_lock ->&k->k_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&c->lock ->&xa->xa_lock#5 ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#25 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock ->&tty->ctrl.lock ->devpts_mutex ->redirect_lock ->&tty->legacy_mutex/1 ->&n->list_lock ->tty_mutex.wait_lock ->&p->pi_lock ->rcu_node_0 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&rcu_state.expedited_wq ->remove_cache_srcu ->&____s->seqcount FD: 1 BD: 10 ....: &tty->read_wait FD: 26 BD: 155 ....: &tty->write_wait ->&p->pi_lock FD: 250 BD: 5 ++++: &tty->ldisc_sem ->fs_reclaim ->&c->lock ->&____s->seqcount ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->init_mm.page_table_lock ->&tty->write_wait ->&tty->read_wait ->&tty->termios_rwsem ->&mm->mmap_lock ->&port_lock_key ->&port->lock ->&tty->flow.lock ->&ldata->atomic_read_lock ->&n->list_lock ->&o_tty->termios_rwsem/1 ->&buf->lock ->tty_ldiscs_lock ->&obj_hash[i].lock ->&tty->ldisc_sem/1 ->&rq->__lock ->remove_cache_srcu FD: 162 BD: 9 ++++: &tty->termios_rwsem ->&port->mutex ->&tty->write_wait ->&tty->read_wait ->&ldata->output_lock ->&port_lock_key ->vmap_area_lock ->&obj_hash[i].lock ->purge_vmap_area_lock ->pool_lock#2 ->&rq->__lock ->rcu_node_0 FD: 1 BD: 9 +.+.: &tty->files_lock FD: 1 BD: 155 ....: &port->lock FD: 26 BD: 387 ....: &wq#2 ->&p->pi_lock FD: 156 BD: 1 +.+.: &type->s_umount_key#24/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->&c->lock ->&____s->seqcount ->list_lrus_mutex ->sb_lock ->inode_hash_lock ->bdev_lock ->&disk->open_mutex ->&obj_hash[i].lock ->&wq->mutex ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->wq_pool_mutex ->mmu_notifier_invalidate_range_start ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&xa->xa_lock#9 ->lock#4 ->&mapping->i_private_lock ->tk_core.seq.seqcount ->bit_wait_table + i ->wq_mayday_lock ->&sbi->old_work_lock ->(work_completion)(&(&sbi->old_work)->work) ->&x->wait#23 FD: 1 BD: 11 +.+.: &bdev->bd_holder_lock FD: 1 BD: 2 +.+.: &sbi->old_work_lock FD: 1 BD: 2 +.+.: 
(work_completion)(&(&sbi->old_work)->work) FD: 1 BD: 9 ....: &x->wait#23 FD: 151 BD: 1 +.+.: &type->s_umount_key#25/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->inode_hash_lock ->bdev_lock ->&disk->open_mutex ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&xa->xa_lock#9 ->lock#4 ->&mapping->i_private_lock ->tk_core.seq.seqcount ->bit_wait_table + i ->&rq->__lock ->&obj_hash[i].lock ->&sb->s_type->i_lock_key#3 ->lock#5 ->&lruvec->lru_lock ->&c->lock ->crypto_alg_sem ->lock#2 ->&x->wait#23 FD: 17 BD: 53 +.+.: (work_completion)(work) ->lock#4 ->lock#5 FD: 334 BD: 1 +.+.: &type->s_umount_key#26/1 ->fs_reclaim ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->inode_hash_lock ->bdev_lock ->&disk->open_mutex ->&c->lock ->&____s->seqcount ->mmu_notifier_invalidate_range_start ->&xa->xa_lock#9 ->lock#4 ->&mapping->i_private_lock ->tk_core.seq.seqcount ->bit_wait_table + i ->&rq->__lock ->&obj_hash[i].lock ->&sb->s_type->i_lock_key#3 ->lock#5 ->&lruvec->lru_lock ->crypto_alg_sem ->pool_lock#2 ->percpu_counters_lock ->&sb->s_type->i_lock_key#22 ->&sb->s_type->i_mutex_key#8 ->proc_subdir_lock ->proc_inum_ida.xa_lock ->&journal->j_state_lock ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&journal->j_wait_done_commit ->&p->alloc_lock ->cpu_hotplug_lock ->wq_pool_mutex ->&ei->i_es_lock ->ext4_grpinfo_slab_create_mutex ->&s->s_inode_list_lock ->ext4_li_mtx ->lock ->&root->kernfs_rwsem ->(console_sem).lock ->&dentry->d_lock FD: 18 BD: 111 +.+.: &bgl->locks[i].lock ->&sbi->s_md_lock ->&obj_hash[i].lock ->pool_lock#2 ->&meta->lock ->kfence_freelist_lock ->&base->lock ->quarantine_lock ->&ei->i_prealloc_lock FD: 47 BD: 268 +.+.: &sb->s_type->i_lock_key#22 ->&dentry->d_lock ->&lru->node[i].lock ->bit_wait_table + i ->&xa->xa_lock#9 FD: 183 BD: 5 ++++: &sb->s_type->i_mutex_key#8 ->&ei->i_es_lock ->&ei->i_data_sem ->&ei->xattr_sem ->tk_core.seq.seqcount ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->&journal->j_state_lock ->jbd2_handle ->&obj_hash[i].lock ->&sb->s_type->i_lock_key#22 ->&wb->list_lock ->&mm->mmap_lock ->fs_reclaim ->&____s->seqcount ->&xa->xa_lock#9 ->lock#4 ->&mapping->i_private_lock ->&c->lock ->&rq->__lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->rcu_node_0 ->remove_cache_srcu ->mapping.invalidate_lock ->free_vmap_area_lock ->vmap_area_lock ->init_mm.page_table_lock ->pcpu_alloc_mutex ->batched_entropy_u32.lock ->swap_cgroup_mutex ->&fq->mq_flush_lock ->&x->wait#26 ->&base->lock ->key ->pcpu_lock ->percpu_counters_lock ->(&timer.timer) ->&((cluster_info + ci)->lock)/1 ->swapon_mutex ->proc_poll_wait.lock ->&dentry->d_lock ->&n->list_lock ->&____s->seqcount#2 ->&meta->lock ->&cfs_rq->removed.lock ->quarantine_lock ->stock_lock ->&rcu_state.expedited_wq FD: 21 BD: 110 ++++: &ei->i_es_lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&sbi->s_es_lock ->&obj_hash[i].lock ->key#2 ->key#5 ->key#6 ->key#7 ->&____s->seqcount#2 ->&n->list_lock ->&base->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->quarantine_lock FD: 92 BD: 109 ++++: &ei->i_data_sem ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->&ei->i_es_lock ->&obj_hash[i].lock ->&____s->seqcount ->&c->lock ->&ei->i_prealloc_lock ->&sb->s_type->i_lock_key#22 ->&(ei->i_block_reservation_lock) ->&lg->lg_mutex ->&mapping->i_private_lock ->&ei->i_raw_lock ->&rq->__lock ->&wb->list_lock ->lock#4 ->&ret->b_state_lock ->&journal->j_revoke_lock ->key#15 ->&sbi->s_md_lock ->key#3 ->&bgl->locks[i].lock ->rcu_node_0 
->&____s->seqcount#2 ->&n->list_lock ->&cfs_rq->removed.lock ->remove_cache_srcu ->&rcu_state.expedited_wq ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->&base->lock ->pool_lock ->&wb->work_lock ->bit_wait_table + i ->&pa->pa_lock#2 ->&xa->xa_lock#9 FD: 1 BD: 111 +.+.: &sbi->s_es_lock FD: 64 BD: 110 ++++: &journal->j_state_lock ->&journal->j_wait_done_commit ->&journal->j_wait_commit ->tk_core.seq.seqcount ->&obj_hash[i].lock ->&base->lock ->&journal->j_wait_updates ->&journal->j_wait_transaction_locked ->&journal->j_list_lock ->&journal->j_wait_reserved FD: 26 BD: 111 ....: &journal->j_wait_done_commit ->&p->pi_lock FD: 26 BD: 111 ....: &journal->j_wait_commit ->&p->pi_lock FD: 104 BD: 2 +.+.: ext4_grpinfo_slab_create_mutex ->slab_mutex FD: 53 BD: 3 +.+.: ext4_li_mtx ->fs_reclaim ->pool_lock#2 ->batched_entropy_u16.lock ->&eli->li_list_mtx ->kthread_create_lock ->&p->pi_lock ->&x->wait ->&rq->__lock ->&obj_hash[i].lock FD: 1 BD: 1 ....: &rs->lock FD: 168 BD: 5 ++++: &type->i_mutex_dir_key#3 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->rename_lock.seqcount ->&ei->i_es_lock ->&ei->i_data_sem ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&xa->xa_lock#9 ->lock#4 ->&mapping->i_private_lock ->tk_core.seq.seqcount ->bit_wait_table + i ->&rq->__lock ->inode_hash_lock ->&obj_hash[i].lock ->&journal->j_state_lock ->&sb->s_type->i_lock_key#22 ->&ei->xattr_sem ->namespace_sem ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&c->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->tomoyo_ss ->&s->s_inode_list_lock ->jbd2_handle ->&mm->mmap_lock ->&n->list_lock ->&____s->seqcount#2 ->remove_cache_srcu ->&meta->lock ->rcu_node_0 ->&cfs_rq->removed.lock ->&rcu_state.expedited_wq ->quarantine_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock ->&dentry->d_lock/1 ->&sem->wait_lock FD: 73 BD: 109 ++++: &ei->xattr_sem ->mmu_notifier_invalidate_range_start ->lock#4 ->&mapping->i_private_lock ->pool_lock#2 ->&ret->b_state_lock ->&journal->j_revoke_lock ->tk_core.seq.seqcount ->&ei->i_raw_lock ->&rq->__lock ->&____s->seqcount ->&xa->xa_lock#9 ->rcu_node_0 ->&c->lock ->stock_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->&rcu_state.expedited_wq ->bit_wait_table + i FD: 26 BD: 50 ..-.: &x->wait#24 ->&p->pi_lock FD: 17 BD: 2 +.+.: (work_completion)(&s->destroy_work) ->&obj_hash[i].lock ->pool_lock#2 ->&rsp->gp_wait ->pcpu_lock ->&base->lock FD: 143 BD: 97 ++++: &vma->vm_lock->lock ->fs_reclaim ->&____s->seqcount ->pool_lock#2 ->ptlock_ptr(ptdesc)#2 ->mmu_notifier_invalidate_range_start ->mapping.invalidate_lock ->&mm->page_table_lock ->&rq->__lock ->rcu_node_0 ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&c->lock ->&folio_wait_table[i] ->&rcu_state.gp_wq ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->&lruvec->lru_lock ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->&cfs_rq->removed.lock ->&rcu_state.expedited_wq ->stock_lock ->&xa->xa_lock#9 ->&sb->s_type->i_lock_key ->&info->lock ->lock#4 ->tk_core.seq.seqcount ->mount_lock ->&n->list_lock ->&____s->seqcount#2 ->&sem->wait_lock ->&base->lock ->pool_lock FD: 45 BD: 524 ++++: &anon_vma->rwsem ->&mm->page_table_lock ->&obj_hash[i].lock ->pool_lock#2 ->&c->lock ->&rq->__lock ->&sem->wait_lock ->&____s->seqcount ->&n->list_lock ->mmu_notifier_invalidate_range_start ->ptlock_ptr(ptdesc)#2 ->&meta->lock ->kfence_freelist_lock ->quarantine_lock 
->&cfs_rq->removed.lock ->stock_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&____s->seqcount#2 ->key ->pcpu_lock ->percpu_counters_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->batched_entropy_u8.lock ->&base->lock FD: 195 BD: 1 +.+.: &sig->cred_guard_mutex ->fs_reclaim ->pool_lock#2 ->init_fs.lock ->&p->pi_lock ->mapping.invalidate_lock ->&folio_wait_table[i] ->&rq->__lock ->tomoyo_ss ->binfmt_lock ->init_binfmt_misc.entries_lock ->&dentry->d_lock ->&type->i_mutex_dir_key#3 ->&sb->s_type->i_lock_key#22 ->&obj_hash[i].lock ->&ei->xattr_sem ->&tsk->futex_exit_mutex ->&sig->exec_update_lock ->&fs->lock ->lock#4 ->&sb->s_type->i_mutex_key#8 ->&p->alloc_lock ->&c->lock ->&____s->seqcount ->tk_core.seq.seqcount ->&mm->mmap_lock ->&stopper->lock ->&stop_pi_lock ->&x->wait#8 ->&n->list_lock ->remove_cache_srcu ->key ->pcpu_lock ->percpu_counters_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->quarantine_lock ->&____s->seqcount#2 ->rcu_node_0 ->&cfs_rq->removed.lock ->key#8 ->&rcu_state.expedited_wq ->stock_lock FD: 137 BD: 99 ++++: mapping.invalidate_lock ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&xa->xa_lock#9 ->lock#4 ->&ei->i_es_lock ->&ei->i_data_sem ->pool_lock#2 ->tk_core.seq.seqcount ->&c->lock ->&folio_wait_table[i] ->&rq->__lock ->&obj_hash[i].lock ->&n->list_lock ->key ->pcpu_lock ->percpu_counters_lock ->&mapping->i_mmap_rwsem ->&journal->j_state_lock ->jbd2_handle FD: 1 BD: 4 ++++: init_binfmt_misc.entries_lock FD: 30 BD: 1 ..-.: &(&ipvs->defense_work)->timer FD: 30 BD: 1 ..-.: &(&gc_work->dwork)->timer FD: 37 BD: 2 +.+.: (work_completion)(&(&gc_work->dwork)->work) ->rcu_node_0 ->&rq->__lock ->&obj_hash[i].lock ->&base->lock ->&cfs_rq->removed.lock ->pool_lock#2 FD: 32 BD: 6 +.+.: (work_completion)(&(&ipvs->defense_work)->work) ->&s->s_inode_list_lock ->&ipvs->dropentry_lock ->&ipvs->droppacket_lock ->&ipvs->securetcp_lock ->&obj_hash[i].lock ->&base->lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&rq->__lock FD: 1 BD: 130 ...-: &____s->seqcount#7 FD: 1 BD: 7 +...: &ipvs->dropentry_lock FD: 1 BD: 7 +...: &ipvs->droppacket_lock FD: 1 BD: 7 +...: &ipvs->securetcp_lock FD: 176 BD: 2 +.+.: &sig->exec_update_lock ->&p->alloc_lock ->&sighand->siglock ->&newf->file_lock ->batched_entropy_u64.lock ->&mm->mmap_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->quarantine_lock ->pool_lock ->&rq->__lock ->&cfs_rq->removed.lock ->stock_lock ->&meta->lock ->kfence_freelist_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 3 BD: 57 ..-.: batched_entropy_u16.lock ->crngs.lock FD: 1 BD: 526 +.+.: ptlock_ptr(ptdesc)#2/1 FD: 1 BD: 111 ....: key#2 FD: 1 BD: 17 ..-.: task_group_lock FD: 96 BD: 1 +.+.: &type->s_umount_key#27/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&____s->seqcount ->&c->lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#23 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&sb->s_type->i_mutex_key#9 ->&dentry->d_lock FD: 38 BD: 136 +.+.: &sb->s_type->i_lock_key#23 ->&dentry->d_lock ->&lru->node[i].lock ->bit_wait_table + i ->&p->pi_lock FD: 89 BD: 4 ++++: &sb->s_type->i_mutex_key#9 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#23 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->rename_lock.seqcount ->proc_subdir_lock ->&p->alloc_lock ->&pid->lock ->sysctl_lock ->namespace_sem ->tomoyo_ss ->&c->lock ->&____s->seqcount ->&n->list_lock ->&rq->__lock ->rcu_node_0 ->&xa->xa_lock#5 
->&obj_hash[i].lock ->stock_lock ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->remove_cache_srcu ->&rcu_state.expedited_wq ->batched_entropy_u8.lock ->kfence_freelist_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&rcu_state.gp_wq ->key ->pcpu_lock ->percpu_counters_lock ->quarantine_lock FD: 745 BD: 2 +.+.: &p->lock ->fs_reclaim ->pool_lock#2 ->&mm->mmap_lock ->file_systems_lock ->namespace_sem ->&c->lock ->&____s->seqcount ->&of->mutex ->&n->list_lock ->remove_cache_srcu ->&rq->__lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->rcu_node_0 ->module_mutex ->&rcu_state.expedited_wq ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->capidev_list_lock FD: 97 BD: 1 +.+.: &type->s_umount_key#28/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->&c->lock ->&____s->seqcount ->sb_lock ->&root->kernfs_rwsem ->&sb->s_type->i_lock_key#24 ->crngs.lock ->&root->kernfs_supers_rwsem ->&dentry->d_lock FD: 38 BD: 266 +.+.: &sb->s_type->i_lock_key#24 ->&dentry->d_lock ->&lru->node[i].lock ->bit_wait_table + i ->&p->pi_lock FD: 176 BD: 3 ++++: &type->i_mutex_dir_key#4 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->rename_lock.seqcount ->&root->kernfs_rwsem ->mmu_notifier_invalidate_range_start ->iattr_mutex ->&obj_hash[i].lock ->&sb->s_type->i_lock_key#24 ->&c->lock ->&____s->seqcount ->namespace_sem ->&mm->mmap_lock ->vmap_area_lock ->tk_core.seq.seqcount ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->quarantine_lock ->remove_cache_srcu ->&rq->__lock ->&base->lock ->rename_lock ->&sem->wait_lock ->&p->pi_lock ->&____s->seqcount#2 ->rcu_node_0 ->&cfs_rq->removed.lock ->&rcu_state.gp_wq ->&rcu_state.expedited_wq ->key ->pcpu_lock ->percpu_counters_lock ->iattr_mutex.wait_lock ->stock_lock ->&meta->lock FD: 50 BD: 196 +.+.: iattr_mutex ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 ->tk_core.seq.seqcount ->&rq->__lock ->iattr_mutex.wait_lock FD: 63 BD: 1 +.+.: &type->s_umount_key#29 ->&x->wait#23 ->shrinker_mutex ->&obj_hash[i].lock ->pool_lock#2 ->&dentry->d_lock ->rename_lock.seqcount ->&sb->s_type->i_lock_key#23 ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->&dentry->d_lock/1 FD: 26 BD: 147 ....: &x->wait#25 ->&p->pi_lock FD: 38 BD: 8 +.+.: &net->unx.table.locks[i] ->&net->unx.table.locks[i]/1 FD: 782 BD: 1 +.+.: &sb->s_type->i_mutex_key#10 ->&net->unx.table.locks[i] ->&u->lock ->&u->peer_wait ->rlock-AF_UNIX ->pool_lock#2 ->&dir->lock ->&obj_hash[i].lock ->sk_lock-AF_INET ->slock-AF_INET#2 ->clock-AF_INET ->rcu_node_0 ->&rq->__lock ->nl_table_lock ->nl_table_wait.lock ->clock-AF_NETLINK ->&nlk->wait ->(netlink_chain).rwsem ->sk_lock-AF_INET6 ->slock-AF_INET6 ->clock-AF_INET6 ->&table->hash[i].lock ->&net->packet.sklist_lock ->&po->bind_lock ->sk_lock-AF_PACKET ->slock-AF_PACKET ->fanout_mutex ->&x->wait#3 ->clock-AF_PACKET ->pcpu_lock ->&cfs_rq->removed.lock ->cb_lock ->genl_sk_destructing_waitq.lock ->sk_lock-AF_BLUETOOTH-BTPROTO_HCI ->slock-AF_BLUETOOTH-BTPROTO_HCI ->hci_dev_list_lock ->quarantine_lock ->stock_lock ->&net->ipv4.ra_mutex ->&hashinfo->lock ->sk_lock-AF_INET/1 ->&net->sctp.addr_wq_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&rnp->exp_lock ->&rnp->exp_wq[2] ->&rnp->exp_wq[0] ->&base->lock ->&rcu_state.expedited_wq ->&____s->seqcount ->pool_lock ->rcu_state.exp_mutex.wait_lock ->&p->pi_lock ->l2tp_ip_lock ->key ->percpu_counters_lock ->sk_lock-AF_INET6/1 ->l2tp_ip6_lock ->&meta->lock ->kfence_freelist_lock ->sk_lock-AF_PHONET 
->slock-AF_PHONET ->clock-AF_PHONET ->rcu_state.exp_mutex ->sk_lock-AF_QIPCRTR ->slock-AF_QIPCRTR ->&hashinfo->lock#2 ->&ping_table.lock ->(work_completion)(&msk->work) ->sk_lock-AF_PPPOX ->slock-AF_PPPOX FD: 46 BD: 3 +.+.: &u->lock ->clock-AF_UNIX ->&u->lock/1 ->rlock-AF_UNIX ->&u->peer_wait ->&sk->sk_peer_lock ->&ei->socket.wq.wait FD: 1 BD: 4 +...: clock-AF_UNIX FD: 26 BD: 4 +.+.: &u->peer_wait ->&p->pi_lock FD: 1 BD: 4 +.+.: rlock-AF_UNIX FD: 184 BD: 1 .+.+: sb_writers#3 ->mount_lock ->tk_core.seq.seqcount ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&journal->j_state_lock ->jbd2_handle ->&obj_hash[i].lock ->&sb->s_type->i_lock_key#22 ->&wb->list_lock ->&wb->work_lock ->&type->i_mutex_dir_key#3 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&type->i_mutex_dir_key#3/1 ->remove_cache_srcu ->&zone->lock ->&xa->xa_lock#9 ->lock#4 ->&mapping->i_private_lock ->bit_wait_table + i ->&rq->__lock ->&sb->s_type->i_mutex_key#8 ->tomoyo_ss ->&n->list_lock ->&s->s_inode_list_lock ->sb_internal ->inode_hash_lock ->&fsnotify_mark_srcu ->&cfs_rq->removed.lock ->&____s->seqcount#2 ->rcu_node_0 ->&rcu_state.expedited_wq ->quarantine_lock ->key ->pcpu_lock ->percpu_counters_lock ->&base->lock ->stock_lock ->&sem->wait_lock ->&p->pi_lock ->fs_reclaim ->&mm->mmap_lock ->&p->alloc_lock ->&f->f_lock ->lock#5 ->&lruvec->lru_lock FD: 174 BD: 2 .+.+: sb_writers#4 ->mount_lock ->tk_core.seq.seqcount ->&sb->s_type->i_lock_key#23 ->&wb->list_lock ->&sb->s_type->i_mutex_key#9 ->sysctl_lock ->&dentry->d_lock ->tomoyo_ss ->fs_reclaim ->pool_lock#2 ->&mm->mmap_lock ->&obj_hash[i].lock ->oom_adj_mutex ->remove_cache_srcu ->&c->lock ->&p->pi_lock ->&____s->seqcount#10 ->&(&net->ipv4.ping_group_range.lock)->lock ->&rq->__lock ->&n->list_lock ->rcu_node_0 ->oom_adj_mutex.wait_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock ->&ent->pde_unload_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&____s->seqcount#2 ->&____s->seqcount FD: 1 BD: 5 +.+.: &pid->lock FD: 170 BD: 1 ++++: &type->s_umount_key#30 ->&lru->node[i].lock ->&dentry->d_lock ->&sb->s_type->i_lock_key#22 ->&sbi->s_writepages_rwsem ->&sem->waiters ->&rsp->gp_wait ->&journal->j_state_lock ->&p->alloc_lock ->(work_completion)(&sbi->s_sb_upd_work) ->key#3 ->key#4 ->&sbi->s_error_lock ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->tk_core.seq.seqcount ->&obj_hash[i].lock ->&base->lock ->bit_wait_table + i ->&rq->__lock ->ext4_li_mtx ->(console_sem).lock ->mount_lock ->&eli->li_list_mtx ->&wb->list_lock FD: 132 BD: 4 ++++: &sbi->s_writepages_rwsem ->&rsp->gp_wait ->&obj_hash[i].lock ->&x->wait#3 ->&rq->__lock ->mmu_notifier_invalidate_range_start ->batched_entropy_u8.lock ->kfence_freelist_lock ->lock#4 ->lock#5 ->&meta->lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&journal->j_state_lock ->jbd2_handle ->tk_core.seq.seqcount ->&base->lock ->&xa->xa_lock#9 ->&____s->seqcount#2 ->&rq_wait->wait ->&__ctx->lock ->rcu_node_0 ->&cfs_rq->removed.lock FD: 1 BD: 2 ....: &sem->waiters FD: 130 BD: 3 +.+.: (work_completion)(&sbi->s_sb_upd_work) ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->&journal->j_state_lock ->jbd2_handle ->&obj_hash[i].lock FD: 1 BD: 110 ....: key#3 FD: 1 BD: 109 ....: key#4 FD: 1 BD: 109 +.+.: &sbi->s_error_lock FD: 4 BD: 4 +.+.: &eli->li_list_mtx ->&obj_hash[i].lock ->pool_lock#2 FD: 129 BD: 108 ++++: jbd2_handle ->&mapping->i_private_lock ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&c->lock ->pool_lock#2 
->&ret->b_state_lock ->&journal->j_revoke_lock ->&ei->i_raw_lock ->&journal->j_wait_updates ->&meta_group_info[i]->alloc_sem ->tk_core.seq.seqcount ->inode_hash_lock ->batched_entropy_u32.lock ->&ei->xattr_sem ->&obj_hash[i].lock ->&ei->i_es_lock ->&dentry->d_lock ->smack_known_lock ->&sb->s_type->i_lock_key#22 ->&journal->j_state_lock ->&rq->__lock ->lock#4 ->lock#5 ->&ei->i_data_sem ->&xa->xa_lock#9 ->bit_wait_table + i ->key#3 ->key#4 ->&sbi->s_error_lock ->&sbi->s_orphan_lock ->&journal->j_list_lock ->stock_lock ->&____s->seqcount#2 ->&n->list_lock ->&cfs_rq->removed.lock ->rcu_node_0 ->quarantine_lock ->&bgl->locks[i].lock ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->smack_known_lock.wait_lock ->&p->pi_lock ->&rq_wait->wait ->&__ctx->lock ->&rcu_state.expedited_wq ->&base->lock ->&lock->wait_lock ->key ->pcpu_lock ->percpu_counters_lock ->&ei->i_prealloc_lock FD: 59 BD: 114 +.+.: &ret->b_state_lock ->&journal->j_list_lock ->&obj_hash[i].lock ->bit_wait_table + i FD: 58 BD: 117 +.+.: &journal->j_list_lock ->&sb->s_type->i_lock_key#3 ->&wb->list_lock ->key#14 ->&obj_hash[i].lock ->pool_lock#2 ->&wb->work_lock FD: 1 BD: 112 +.+.: &journal->j_revoke_lock FD: 1 BD: 112 +.+.: &ei->i_raw_lock FD: 26 BD: 111 ....: &journal->j_wait_updates ->&p->pi_lock FD: 13 BD: 284 -...: &wb->work_lock ->&obj_hash[i].lock ->&base->lock FD: 85 BD: 109 ++++: &meta_group_info[i]->alloc_sem ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->tk_core.seq.seqcount ->&____s->seqcount ->&c->lock ->&obj_hash[i].lock ->&base->lock ->&x->wait#26 ->&__ctx->lock ->&rq->__lock ->(&timer.timer) ->&fq->mq_flush_lock ->&rq_wait->wait ->rcu_node_0 ->&ret->b_state_lock ->&bgl->locks[i].lock ->&cfs_rq->removed.lock FD: 133 BD: 3 .+.+: sb_internal ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->pool_lock#2 ->&journal->j_state_lock ->jbd2_handle ->&obj_hash[i].lock ->&c->lock ->remove_cache_srcu ->&rq->__lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&n->list_lock ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->quarantine_lock ->&base->lock ->rcu_node_0 ->key ->pcpu_lock ->percpu_counters_lock ->&rcu_state.expedited_wq ->stock_lock FD: 2 BD: 113 ++++: &ei->i_prealloc_lock ->&pa->pa_lock#2 FD: 27 BD: 1 .+.+: file_rwsem ->&ctx->flc_lock ->&rq->__lock FD: 2 BD: 2 +.+.: &ctx->flc_lock ->&fll->lock FD: 1 BD: 3 +.+.: &fll->lock FD: 160 BD: 2 +.+.: &type->i_mutex_dir_key#3/1 ->rename_lock.seqcount ->&dentry->d_lock ->fs_reclaim ->&ei->i_es_lock ->&ei->i_data_sem ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&xa->xa_lock#9 ->lock#4 ->pool_lock#2 ->&mapping->i_private_lock ->tk_core.seq.seqcount ->bit_wait_table + i ->&rq->__lock ->inode_hash_lock ->&obj_hash[i].lock ->&journal->j_state_lock ->&sb->s_type->i_lock_key#22 ->&c->lock ->&ei->xattr_sem ->tomoyo_ss ->&s->s_inode_list_lock ->jbd2_handle ->&sb->s_type->i_mutex_key#8 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->rcu_node_0 ->&xa->xa_lock#5 ->stock_lock ->&____s->seqcount#2 ->&fsnotify_mark_srcu ->&type->i_mutex_dir_key#3 ->&wb->list_lock ->sb_internal ->&n->list_lock ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock ->remove_cache_srcu ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&rcu_state.expedited_wq ->quarantine_lock ->&base->lock FD: 76 BD: 1 +.+.: &type->s_umount_key#31/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#25 ->&s->s_inode_list_lock 
->tk_core.seq.seqcount ->&sb->s_type->i_mutex_key#11 ->&dentry->d_lock FD: 37 BD: 7 +.+.: &sb->s_type->i_lock_key#25 ->&dentry->d_lock FD: 62 BD: 2 +.+.: &sb->s_type->i_mutex_key#11 ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#25 ->&s->s_inode_list_lock ->tk_core.seq.seqcount FD: 62 BD: 1 +.+.: &type->s_umount_key#32 ->sb_lock ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->&c->lock ->&____s->seqcount ->&lru->node[i].lock ->&obj_hash[i].lock FD: 39 BD: 1 +.+.: &type->s_umount_key#33 ->sb_lock ->&dentry->d_lock FD: 39 BD: 1 +.+.: &type->s_umount_key#34 ->sb_lock ->&dentry->d_lock FD: 75 BD: 1 +.+.: &type->s_umount_key#35/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#26 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&dentry->d_lock FD: 37 BD: 4 +.+.: &sb->s_type->i_lock_key#26 ->&dentry->d_lock FD: 39 BD: 1 +.+.: &type->s_umount_key#36 ->sb_lock ->&dentry->d_lock FD: 1 BD: 4 +.+.: redirect_lock FD: 243 BD: 1 +.+.: &tty->atomic_write_lock ->fs_reclaim ->pool_lock#2 ->&mm->mmap_lock ->&tty->termios_rwsem ->&tty->files_lock FD: 36 BD: 10 +.+.: &ldata->output_lock ->&port_lock_key ->&rq->__lock FD: 76 BD: 1 +.+.: &type->s_umount_key#37/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#27 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->fuse_mutex ->&dentry->d_lock FD: 37 BD: 131 +.+.: &sb->s_type->i_lock_key#27 ->&dentry->d_lock FD: 1 BD: 2 +.+.: fuse_mutex FD: 81 BD: 1 +.+.: &type->s_umount_key#38/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#28 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->pstore_sb_lock ->&sb->s_type->i_mutex_key#12 ->&dentry->d_lock FD: 37 BD: 2 +.+.: &sb->s_type->i_lock_key#28 ->&dentry->d_lock FD: 57 BD: 4 +.+.: &sb->s_type->i_mutex_key#12 ->fs_reclaim ->&zone->lock ->&____s->seqcount ->&psinfo->read_mutex ->&obj_hash[i].lock FD: 56 BD: 5 +.+.: &psinfo->read_mutex ->(efivars_lock).lock ->fs_reclaim ->pool_lock#2 ->(efi_runtime_lock).lock ->&obj_hash[i].lock ->&x->wait#12 ->&rq->__lock FD: 79 BD: 1 +.+.: &type->s_umount_key#39/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#29 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&____s->seqcount ->bpf_preload_lock ->&dentry->d_lock FD: 37 BD: 2 +.+.: &sb->s_type->i_lock_key#29 ->&dentry->d_lock FD: 56 BD: 2 +.+.: bpf_preload_lock ->(kmod_concurrent_max).lock ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&x->wait#17 ->&rq->__lock ->key ->pcpu_lock ->percpu_counters_lock ->running_helpers_waitq.lock FD: 30 BD: 1 ++++: uts_sem ->hostname_poll.wait.lock ->&rq->__lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 FD: 162 BD: 3 ++++: &type->i_mutex_dir_key#5 ->fs_reclaim ->&dentry->d_lock ->rename_lock.seqcount ->tomoyo_ss ->&sbinfo->stat_lock ->pool_lock#2 ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&xattrs->lock ->&obj_hash[i].lock ->&simple_offset_xa_lock ->smack_known_lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->remove_cache_srcu ->&rq->__lock ->&sem->wait_lock ->batched_entropy_u8.lock 
->kfence_freelist_lock ->rcu_node_0 ->quarantine_lock ->&p->pi_lock ->&mm->mmap_lock ->vmap_area_lock ->&____s->seqcount#2 ->&meta->lock ->&cfs_rq->removed.lock ->smack_known_lock.wait_lock ->&rcu_state.gp_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&rcu_state.expedited_wq FD: 176 BD: 2 .+.+: sb_writers#5 ->mount_lock ->&type->i_mutex_dir_key#5 ->&type->i_mutex_dir_key#5/1 ->tk_core.seq.seqcount ->&sb->s_type->i_lock_key ->&wb->list_lock ->&sb->s_type->i_mutex_key#13 ->&sem->wait_lock ->&p->pi_lock ->&rq->__lock ->&s->s_inode_list_lock ->&info->lock ->&obj_hash[i].lock ->pool_lock#2 ->&sbinfo->stat_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->tomoyo_ss ->&xattrs->lock ->fs_reclaim ->lock#4 ->lock#5 ->&lruvec->lru_lock ->&dentry->d_lock ->quarantine_lock ->&cfs_rq->removed.lock ->rcu_node_0 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&base->lock FD: 95 BD: 3 +.+.: &type->i_mutex_dir_key#5/1 ->rename_lock.seqcount ->fs_reclaim ->&dentry->d_lock ->tomoyo_ss ->&sbinfo->stat_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->batched_entropy_u32.lock ->&xattrs->lock ->&obj_hash[i].lock ->&simple_offset_xa_lock ->smack_known_lock ->&c->lock ->&____s->seqcount ->&u->bindlock ->pool_lock#2 ->&sb->s_type->i_mutex_key#13/4 ->&n->list_lock ->&sem->wait_lock ->&rq->__lock ->&sb->s_type->i_mutex_key#13 ->&fsnotify_mark_srcu ->lock#4 ->lock#5 ->&lruvec->lru_lock ->&info->lock ->&xa->xa_lock#9 ->remove_cache_srcu ->quarantine_lock ->&____s->seqcount#2 ->rcu_node_0 ->&cfs_rq->removed.lock ->&p->pi_lock ->&rcu_state.gp_wq ->batched_entropy_u8.lock ->kfence_freelist_lock ->&rcu_state.expedited_wq ->smack_known_lock.wait_lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->key#9 FD: 27 BD: 11 +.-.: (&cb->timer) ->&obj_hash[i].lock ->&base->lock ->tk_core.seq.seqcount ->&rq_wait->wait FD: 2 BD: 8 +.+.: &f->f_lock ->fasync_lock FD: 1 BD: 2 ....: hostname_poll.wait.lock FD: 1 BD: 111 ....: key#5 FD: 1 BD: 111 ....: key#6 FD: 1 BD: 111 ....: key#7 FD: 761 BD: 1 +.+.: &f->f_pos_lock ->&type->i_mutex_dir_key#3 ->&mm->mmap_lock ->&type->i_mutex_dir_key#4 ->sb_writers#5 ->&type->i_mutex_dir_key#5 ->&p->lock ->sysctl_lock ->fs_reclaim ->&zone->lock ->&____s->seqcount ->&obj_hash[i].lock ->&rq->__lock ->&cfs_rq->removed.lock ->&sb->s_type->i_mutex_key#17 ->rcu_node_0 ->&rcu_state.expedited_wq ->pool_lock#2 ->sb_writers#4 ->&lock->wait_lock FD: 147 BD: 96 +.+.: &mm->mmap_lock/1 ->fs_reclaim ->pool_lock#2 ->&c->lock ->&vma->vm_lock->lock ->&mapping->i_mmap_rwsem ->&anon_vma->rwsem ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&mm->page_table_lock ->ptlock_ptr(ptdesc)#2 ->&rq->__lock ->&sem->wait_lock ->&p->pi_lock ->remove_cache_srcu ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->rcu_node_0 ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->stock_lock ->&rcu_state.expedited_wq ->&____s->seqcount#2 ->key#22 ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->key ->pcpu_lock ->percpu_counters_lock FD: 13 BD: 5 +.-.: (&net->can.stattimer) ->&obj_hash[i].lock ->&base->lock FD: 165 BD: 4 +.+.: &sb->s_type->i_mutex_key#13 ->&xattrs->lock ->tk_core.seq.seqcount ->&mm->mmap_lock ->fs_reclaim ->&____s->seqcount ->&xa->xa_lock#9 ->&sb->s_type->i_lock_key ->&info->lock ->lock#4 ->&wb->list_lock ->key#9 ->&sb->s_type->i_mutex_key#13/4 ->&rq->__lock ->&simple_offset_xa_lock ->&dentry->d_lock ->&mapping->i_mmap_rwsem ->lock#5 ->&lruvec->lru_lock ->&obj_hash[i].lock ->tomoyo_ss 
->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&cfs_rq->removed.lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock FD: 64 BD: 6 +.+.: &u->bindlock ->&net->unx.table.locks[i] ->&net->unx.table.locks[i]/1 ->&bsd_socket_locks[i] ->fs_reclaim ->pool_lock#2 ->batched_entropy_u32.lock FD: 37 BD: 9 +.+.: &net->unx.table.locks[i]/1 ->&dentry->d_lock FD: 1 BD: 7 +.+.: &bsd_socket_locks[i] FD: 176 BD: 1 +.+.: &u->iolock ->rlock-AF_UNIX ->&u->peer_wait ->&mm->mmap_lock ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock ->quarantine_lock ->rcu_node_0 ->&meta->lock ->kfence_freelist_lock ->&u->lock ->&dir->lock ->&base->lock ->&cfs_rq->removed.lock ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 28 BD: 256 ..-.: &ei->socket.wq.wait ->&p->pi_lock ->&ep->lock FD: 1 BD: 2 ....: key#8 FD: 39 BD: 4 +.+.: &u->lock/1 ->&sk->sk_peer_lock ->&dentry->d_lock ->&sk->sk_peer_lock/1 FD: 169 BD: 1 +.+.: &pipe->mutex/1 ->&pipe->rd_wait ->&pipe->wr_wait ->fs_reclaim ->&____s->seqcount ->&mm->mmap_lock ->&rq->__lock ->&lock->wait_lock ->&obj_hash[i].lock ->&cfs_rq->removed.lock ->rcu_node_0 ->stock_lock ->&rcu_state.expedited_wq ->pool_lock#2 FD: 28 BD: 4 ....: &pipe->rd_wait ->&p->pi_lock ->&ep->lock FD: 28 BD: 4 ....: &pipe->wr_wait ->&p->pi_lock ->&ep->lock FD: 47 BD: 1 .+.+: sb_writers#6 ->tk_core.seq.seqcount ->mount_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 174 BD: 1 +.+.: sk_lock-AF_NETLINK ->slock-AF_NETLINK ->&mm->mmap_lock ->fs_reclaim ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->pcpu_alloc_mutex ->&obj_hash[i].lock ->batched_entropy_u32.lock ->vmap_purge_lock ->&fp->aux->used_maps_mutex ->&rq->__lock ->&cfs_rq->removed.lock ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 1 BD: 2 +...: slock-AF_NETLINK FD: 2 BD: 6 +.+.: &sk->sk_peer_lock ->&sk->sk_peer_lock/1 FD: 1 BD: 1 ....: &rs->lock#2 FD: 30 BD: 1 ..-.: &(&krcp->monitor_work)->timer FD: 32 BD: 2 +.+.: (work_completion)(&(&krcp->monitor_work)->work) ->krc.lock ->&obj_hash[i].lock ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 30 BD: 1 ..-.: &(&tbl->managed_work)->timer FD: 50 BD: 3 +.+.: oom_adj_mutex ->&p->alloc_lock ->&rq->__lock ->rcu_node_0 ->oom_adj_mutex.wait_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->&rcu_state.expedited_wq FD: 76 BD: 3 +.+.: &group->mark_mutex ->&fsnotify_mark_srcu ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 ->lock ->ucounts_lock ->&mark->lock ->&conn->lock ->&sb->s_type->i_lock_key#22 ->&sb->s_type->i_lock_key ->&____s->seqcount#2 ->remove_cache_srcu ->&rq->__lock ->&lock->wait_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 13 BD: 190 +.+.: &group->inotify_data.idr_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock FD: 3 BD: 4 +.+.: &mark->lock ->&fsnotify_mark_srcu ->&conn->lock FD: 1 BD: 8 +.+.: &conn->lock FD: 184 BD: 2 +.+.: &ep->mtx ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&f->f_lock ->&ei->socket.wq.wait ->&ep->lock ->&group->notification_waitq ->&group->notification_lock ->&sighand->signalfd_wqh ->&sighand->siglock ->&mm->mmap_lock ->&rq->__lock ->&pipe->rd_wait ->key#10 ->&obj_hash[i].lock ->&lock->wait_lock ->sysctl_lock ->&pipe->wr_wait ->&cfs_rq->removed.lock ->rcu_node_0 ->&____s->seqcount#2 ->&rcu_state.expedited_wq ->stock_lock ->key 
->pcpu_lock ->percpu_counters_lock FD: 185 BD: 1 +.+.: epnested_mutex ->&ep->mtx FD: 27 BD: 271 ...-: &ep->lock ->&ep->wq FD: 28 BD: 8 ....: &group->notification_waitq ->&p->pi_lock ->&ep->lock FD: 1 BD: 8 +.+.: &group->notification_lock FD: 28 BD: 101 ....: &sighand->signalfd_wqh ->&ep->lock ->&p->pi_lock FD: 738 BD: 1 .+.+: sb_writers#7 ->mount_lock ->tk_core.seq.seqcount ->&sb->s_type->i_lock_key#24 ->&wb->list_lock ->&type->i_mutex_dir_key#4 ->fs_reclaim ->pool_lock#2 ->&mm->mmap_lock ->&of->mutex ->&obj_hash[i].lock ->remove_cache_srcu ->&c->lock ->&n->list_lock ->quarantine_lock ->&root->kernfs_iattr_rwsem ->&dentry->d_lock ->tomoyo_ss ->iattr_mutex ->&sb->s_type->i_mutex_key#14 ->&xattrs->lock ->&rq->__lock ->&cfs_rq->removed.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 19 BD: 1 +.-.: (&vblank->disable_timer) ->&dev->vbl_lock FD: 3 BD: 8 +.+.: swap_lock ->&p->lock#2 FD: 62 BD: 1 .+.+: kn->active ->fs_reclaim ->&c->lock ->pool_lock#2 ->&kernfs_locks->open_file_mutex[count] ->&k->list_lock ->uevent_sock_mutex ->&obj_hash[i].lock ->&n->list_lock ->&____s->seqcount ->quarantine_lock FD: 48 BD: 58 +.+.: &kernfs_locks->open_file_mutex[count] ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&n->list_lock ->&____s->seqcount ->remove_cache_srcu ->&rq->__lock ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->&lock->wait_lock ->rcu_node_0 FD: 737 BD: 6 +.+.: &of->mutex ->&rq->__lock ->cgroup_mutex ->&p->pi_lock ->cgroup_mutex.wait_lock ->&root->deactivate_waitq FD: 1 BD: 251 ..-.: rlock-AF_NETLINK FD: 26 BD: 272 ..-.: &ep->wq ->&p->pi_lock FD: 60 BD: 1 .+.+: kn->active#2 ->fs_reclaim ->&c->lock ->&____s->seqcount ->&kernfs_locks->open_file_mutex[count] ->pool_lock#2 ->uevent_sock_mutex ->&obj_hash[i].lock ->&n->list_lock ->remove_cache_srcu ->quarantine_lock ->&rq->__lock FD: 1 BD: 6 ....: &nlk->wait FD: 30 BD: 1 +.-.: (&q->timeout) FD: 34 BD: 1 +.+.: (wq_completion)kblockd ->(work_completion)(&q->timeout_work) ->(work_completion)(&(&hctx->run_work)->work) ->(work_completion)(&(&q->requeue_work)->work) FD: 1 BD: 2 +.+.: (work_completion)(&q->timeout_work) FD: 31 BD: 2 +.+.: (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) ->krc.lock ->&obj_hash[i].lock ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 60 BD: 1 .+.+: kn->active#3 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->pool_lock#2 ->uevent_sock_mutex ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->quarantine_lock ->&n->list_lock ->remove_cache_srcu ->&rq->__lock FD: 109 BD: 5 ++++: kn->active#4 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->pool_lock#2 ->uevent_sock_mutex ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->quarantine_lock ->&____s->seqcount ->&device->physical_node_lock ->udc_lock ->fw_lock ->remove_cache_srcu ->&rfkill->lock ->&rq->__lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&____s->seqcount#2 ->uevent_sock_mutex.wait_lock ->&p->pi_lock ->&cfs_rq->removed.lock ->&root->deactivate_waitq ->&lock->wait_lock ->&base->lock ->&meta->lock FD: 30 BD: 1 ..-.: &(&ovs_net->masks_rebalance)->timer FD: 234 BD: 6 +.+.: (work_completion)(&(&ovs_net->masks_rebalance)->work) ->ovs_mutex ->&obj_hash[i].lock ->&base->lock ->&rq->__lock FD: 1 BD: 7 +.+.: &sk->sk_peer_lock/1 FD: 56 BD: 1 .+.+: kn->active#5 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->param_lock ->&c->lock ->pool_lock#2 ->&on->poll ->&n->list_lock FD: 52 BD: 2 +.+.: &sb->s_type->i_mutex_key#14 ->tk_core.seq.seqcount ->&root->kernfs_iattr_rwsem ->&rq->__lock ->&sem->wait_lock 
->&p->pi_lock FD: 1 BD: 49 +.+.: disk_events_mutex FD: 63 BD: 5 +.+.: &sb->s_type->i_mutex_key#13/4 ->&dentry->d_lock ->&simple_offset_xa_lock ->fs_reclaim ->tk_core.seq.seqcount ->rename_lock ->&rq->__lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->rcu_node_0 ->&cfs_rq->removed.lock FD: 30 BD: 387 +.+.: &dentry->d_lock/2 ->&dentry->d_lock/3 FD: 29 BD: 388 +.+.: &dentry->d_lock/3 ->&____s->seqcount#6 ->&wq FD: 1 BD: 390 +.+.: &____s->seqcount#6/1 FD: 52 BD: 1 .+.+: kn->active#6 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock FD: 52 BD: 1 .+.+: kn->active#7 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock FD: 52 BD: 1 .+.+: kn->active#8 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock FD: 48 BD: 1 .+.+: kn->active#9 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->remove_cache_srcu FD: 52 BD: 1 .+.+: kn->active#10 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock FD: 48 BD: 1 .+.+: kn->active#11 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->remove_cache_srcu FD: 52 BD: 1 .+.+: kn->active#12 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock FD: 50 BD: 1 .+.+: kn->active#13 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->dev_base_lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->&____s->seqcount#2 ->remove_cache_srcu ->&rq->__lock FD: 52 BD: 1 .+.+: kn->active#14 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->dev_base_lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->&____s->seqcount#2 ->&rq->__lock ->remove_cache_srcu FD: 50 BD: 1 .+.+: kn->active#15 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->&rq->__lock ->&____s->seqcount#2 ->&____s->seqcount ->remove_cache_srcu FD: 52 BD: 1 .+.+: kn->active#16 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->dev_base_lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->&rq->__lock ->&____s->seqcount#2 ->remove_cache_srcu FD: 54 BD: 1 .+.+: kn->active#17 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->dev_base_lock ->&c->lock ->&n->list_lock FD: 52 BD: 1 .+.+: kn->active#18 ->fs_reclaim ->&c->lock ->&n->list_lock ->&kernfs_locks->open_file_mutex[count] FD: 52 BD: 1 .+.+: kn->active#19 ->fs_reclaim ->&c->lock ->&____s->seqcount ->&kernfs_locks->open_file_mutex[count] FD: 52 BD: 1 .+.+: kn->active#20 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 52 BD: 1 .+.+: kn->active#21 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 48 BD: 1 .+.+: kn->active#22 ->fs_reclaim ->&c->lock ->&kernfs_locks->open_file_mutex[count] ->remove_cache_srcu FD: 52 BD: 1 .+.+: kn->active#23 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock FD: 52 BD: 1 .+.+: kn->active#24 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock FD: 54 BD: 1 .+.+: kn->active#25 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->dev_base_lock FD: 60 BD: 1 .+.+: kn->active#26 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&dev->power.lock ->pci_lock FD: 52 BD: 1 .+.+: kn->active#27 ->fs_reclaim ->&c->lock ->&kernfs_locks->open_file_mutex[count] FD: 52 BD: 1 .+.+: kn->active#28 ->fs_reclaim ->&c->lock ->&n->list_lock ->&kernfs_locks->open_file_mutex[count] FD: 74 BD: 51 +.-.: slock-AF_INET/1 ->tk_core.seq.seqcount ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&base->lock ->&n->list_lock ->&hashinfo->ehash_locks[i] ->&tcp_hashinfo.bhash[i].lock ->&zone->lock 
->&____s->seqcount ->quarantine_lock ->&meta->lock ->kfence_freelist_lock ->&sctp_ep_hashtable[i].lock ->clock-AF_INET ->&____s->seqcount#2 ->key#25 ->krc.lock ->&sctp_port_hashtable[i].lock ->batched_entropy_u8.lock FD: 105 BD: 47 +.+.: devnet_rename_sem ->(console_sem).lock ->console_owner_lock ->console_owner ->fs_reclaim ->pool_lock#2 ->&k->list_lock ->&root->kernfs_rwsem ->&c->lock ->&n->list_lock ->kernfs_rename_lock ->uevent_sock_mutex ->&obj_hash[i].lock ->&____s->seqcount ->&rq->__lock ->&sem->wait_lock ->&p->pi_lock ->&____s->seqcount#2 ->remove_cache_srcu ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->quarantine_lock ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->uevent_sock_mutex.wait_lock FD: 1 BD: 192 ....: kernfs_rename_lock FD: 52 BD: 1 .+.+: kn->active#29 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock FD: 52 BD: 1 .+.+: kn->active#30 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock FD: 1 BD: 1 +.+.: &evdev->client_lock FD: 28 BD: 1 +.+.: &evdev->mutex ->&dev->mutex#2 FD: 52 BD: 1 .+.+: kn->active#31 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 52 BD: 1 .+.+: kn->active#32 ->fs_reclaim ->&c->lock ->&kernfs_locks->open_file_mutex[count] ->&n->list_lock FD: 52 BD: 1 .+.+: kn->active#33 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock FD: 1 BD: 1 +.+.: &sb->s_type->i_mutex_key#15 FD: 46 BD: 1 .+.+: mapping.invalidate_lock#2 ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&xa->xa_lock#9 ->lock#4 ->pool_lock#2 ->tk_core.seq.seqcount ->&c->lock ->&rq->__lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 52 BD: 1 .+.+: kn->active#34 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 25 BD: 51 +.+.: &nft_net->commit_mutex ->&rq->__lock FD: 1 BD: 51 ....: target_list_lock FD: 230 BD: 47 +.+.: sk_lock-AF_INET ->slock-AF_INET#2 ->&table->hash[i].lock ->&tcp_hashinfo.bhash[i].lock ->&h->lhash2[i].lock ->&icsk->icsk_accept_queue.rskq_lock ->clock-AF_INET ->&obj_hash[i].lock ->&base->lock ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&mm->mmap_lock ->tk_core.seq.seqcount ->&sd->defer_lock ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->rcu_node_0 ->&rq->__lock ->remove_cache_srcu ->once_mutex ->batched_entropy_u32.lock ->batched_entropy_u16.lock ->&____s->seqcount#2 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->&dccp_hashinfo.bhash[i].lock ->quarantine_lock ->&meta->lock ->key ->pcpu_lock ->percpu_counters_lock ->stock_lock ->&sctp_port_hashtable[i].lock ->&net->xfrm.xfrm_policy_lock ->l2tp_ip_lock ->&msk->pm.lock ->crngs.lock ->&asoc->wait ->krc.lock ->&list->lock#22 FD: 76 BD: 52 +.-.: slock-AF_INET#2 ->&obj_hash[i].lock ->batched_entropy_u16.lock ->&tcp_hashinfo.bhash[i].lock ->&hashinfo->ehash_locks[i] ->tk_core.seq.seqcount ->(&req->rsk_timer) ->&base->lock ->&icsk->icsk_accept_queue.rskq_lock ->pool_lock#2 ->&c->lock ->&____s->seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock ->&sk->sk_lock.wq ->&n->list_lock ->&dccp_hashinfo.bhash[i].lock ->quarantine_lock ->key#24 ->&meta->lock FD: 1 BD: 73 ++..: clock-AF_INET FD: 1 BD: 5 ....: key#9 FD: 52 BD: 1 .+.+: kn->active#35 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 1 BD: 3 ....: key#10 FD: 1 BD: 1 +.+.: fh->state->lock FD: 1 BD: 5 ....: &vdev->fh_lock FD: 54 BD: 2 +.+.: &dev->dev_mutex ->fs_reclaim ->&c->lock ->pool_lock#2 ->vim2m:1183:(hdl)->_lock ->&____s->seqcount 
->&obj_hash[i].lock ->&vdev->fh_lock ->&m2m_dev->job_spinlock ->&q->done_wq ->&q->mmap_lock ->&n->list_lock ->&____s->seqcount#2 ->&rq->__lock ->quarantine_lock FD: 26 BD: 3 +.+.: vim2m:1183:(hdl)->_lock ->&obj_hash[i].lock ->pool_lock#2 ->quarantine_lock ->&rq->__lock FD: 175 BD: 1 +.+.: &mdev->req_queue_mutex ->&vdev->fh_lock ->&mdev->graph_mutex ->vim2m:1183:(hdl)->_lock ->&dev->dev_mutex ->&obj_hash[i].lock ->pool_lock#2 ->&dev_instance->mutex ->vicodec_core:1851:(hdl)->_lock ->&____s->seqcount ->&dev->mutex#3 ->&rq->__lock ->quarantine_lock FD: 1 BD: 4 ....: &m2m_dev->job_spinlock FD: 1 BD: 6 ....: &q->done_wq FD: 25 BD: 6 +.+.: &q->mmap_lock ->&rq->__lock FD: 53 BD: 2 +.+.: &dev_instance->mutex ->fs_reclaim ->&c->lock ->&n->list_lock ->pool_lock#2 ->vicodec_core:1851:(hdl)->_lock ->&vdev->fh_lock ->&m2m_dev->job_spinlock ->&q->done_wq ->&q->mmap_lock ->&obj_hash[i].lock ->&____s->seqcount ->&____s->seqcount#2 ->&rq->__lock ->remove_cache_srcu ->quarantine_lock FD: 29 BD: 3 +.+.: vicodec_core:1851:(hdl)->_lock ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock ->quarantine_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 52 BD: 1 .+.+: kn->active#36 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 52 BD: 1 .+.+: kn->active#37 ->fs_reclaim ->&c->lock ->&____s->seqcount ->&kernfs_locks->open_file_mutex[count] FD: 52 BD: 1 .+.+: kn->active#38 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&____s->seqcount FD: 29 BD: 1 +.+.: &vcapture->lock ->&q->done_wq ->&q->mmap_lock ->&rq->__lock ->&lock->wait_lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 28 BD: 2 +.+.: &dev->mutex#3 ->&vdev->fh_lock ->&q->done_wq ->&q->mmap_lock ->&rq->__lock FD: 27 BD: 11 +.+.: &lo->lo_mutex ->&rq->__lock ->&lock->wait_lock FD: 46 BD: 11 +.+.: &nbd->config_lock ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->&bdev->bd_size_lock ->&q->queue_lock ->&ACCESS_PRIVATE(sdp, lock) ->set->srcu ->&obj_hash[i].lock ->&x->wait#2 ->&rq->__lock ->&c->lock ->&____s->seqcount FD: 30 BD: 8 ....: &ACCESS_PRIVATE(ssp->srcu_sup, lock) ->&obj_hash[i].lock ->&base->lock FD: 26 BD: 111 -...: &x->wait#26 ->&p->pi_lock FD: 1 BD: 114 +.+.: &__ctx->lock FD: 34 BD: 110 -.-.: &fq->mq_flush_lock ->tk_core.seq.seqcount ->&q->requeue_lock ->&obj_hash[i].lock ->&x->wait#26 ->bit_wait_table + i ->quarantine_lock FD: 1 BD: 114 -.-.: &q->requeue_lock FD: 26 BD: 121 -.-.: &rq_wait->wait ->&p->pi_lock FD: 2 BD: 10 +.+.: &new->lock ->&mtdblk->cache_mutex FD: 1 BD: 11 +.+.: &mtdblk->cache_mutex FD: 52 BD: 1 .+.+: kn->active#39 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 165 BD: 1 +.+.: &mtd->master.chrdev_lock ->&mm->mmap_lock FD: 26 BD: 1 +.-.: (&journal->j_commit_timer) ->&p->pi_lock FD: 73 BD: 1 +.+.: &journal->j_checkpoint_mutex ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->tk_core.seq.seqcount ->bit_wait_table + i ->&rq->__lock ->&journal->j_state_lock FD: 26 BD: 111 ....: &journal->j_wait_transaction_locked ->&p->pi_lock FD: 1 BD: 119 -...: &memcg->move_lock FD: 1 BD: 284 -...: key#11 FD: 1 BD: 112 +.+.: &sbi->s_md_lock FD: 1 BD: 1 ....: &journal->j_fc_wait FD: 1 BD: 1 +.+.: &journal->j_history_lock FD: 30 BD: 1 ..-.: &(&wb->dwork)->timer FD: 137 BD: 1 +.+.: (wq_completion)writeback ->(work_completion)(&(&wb->dwork)->work) ->(work_completion)(&(&wb->bw_dwork)->work) ->(work_completion)(&barr->work) FD: 134 BD: 2 +.+.: (work_completion)(&(&wb->dwork)->work) ->&wb->work_lock ->&wb->list_lock ->&p->sequence ->key#11 ->&pl->lock ->&rq->__lock FD: 2 BD: 4 +.-.: &p->sequence ->key#13 
FD: 68 BD: 1 .+.+: &type->s_umount_key#40 ->&sb->s_type->i_lock_key#3 ->mmu_notifier_invalidate_range_start ->pool_lock#2 ->tk_core.seq.seqcount ->&c->lock ->lock#4 ->lock#5 ->&wb->list_lock ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->&____s->seqcount#2 ->&____s->seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock ->&n->list_lock FD: 1 BD: 284 -...: &s->s_inode_wblist_lock FD: 1 BD: 285 -...: key#12 FD: 30 BD: 1 ..-.: &(&wb->bw_dwork)->timer FD: 56 BD: 2 +.+.: (work_completion)(&(&wb->bw_dwork)->work) ->&wb->list_lock FD: 52 BD: 1 .+.+: kn->active#40 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 1 BD: 6 +.+.: destroy_lock FD: 30 BD: 1 ..-.: fs/notify/mark.c:89 FD: 61 BD: 2 +.+.: (reaper_work).work ->&rq->__lock ->destroy_lock ->&ACCESS_PRIVATE(sdp, lock) ->&fsnotify_mark_srcu ->&obj_hash[i].lock ->&ACCESS_PRIVATE(ssp->srcu_sup, lock) ->&x->wait#2 ->pool_lock#2 ->&base->lock ->&cfs_rq->removed.lock ->pool_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 61 BD: 2 +.+.: connector_reaper_work ->destroy_lock ->&ACCESS_PRIVATE(sdp, lock) ->&fsnotify_mark_srcu ->&obj_hash[i].lock ->&rq->__lock ->&x->wait#2 ->pool_lock#2 ->pool_lock ->&base->lock ->&cfs_rq->removed.lock ->&ACCESS_PRIVATE(ssp->srcu_sup, lock) ->rcu_node_0 ->&rcu_state.expedited_wq FD: 30 BD: 1 +.-.: (&sdp->delay_work) FD: 12 BD: 48 +...: fib_info_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 78 BD: 48 +...: &net->sctp.local_addr_lock ->&net->sctp.addr_wq_lock FD: 77 BD: 50 +.-.: &net->sctp.addr_wq_lock ->pool_lock#2 ->&obj_hash[i].lock ->&base->lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->&____s->seqcount#2 ->slock-AF_INET/1 ->k-slock-AF_INET6/1 ->slock-AF_INET6/1 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->quarantine_lock FD: 54 BD: 50 +.+.: (work_completion)(&ht->run_work) ->&ht->mutex ->&rq->__lock FD: 52 BD: 51 +.+.: &ht->mutex ->fs_reclaim ->pool_lock#2 ->batched_entropy_u32.lock ->rhashtable_bucket ->&ht->lock ->&c->lock ->remove_cache_srcu ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock ->&obj_hash[i].lock ->rcu_node_0 ->&rq->__lock ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->&base->lock ->batched_entropy_u8.lock ->&cfs_rq->removed.lock FD: 1 BD: 102 ....: rhashtable_bucket/1 FD: 11 BD: 52 +.+.: &ht->lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 2 +...: clock-AF_NETLINK FD: 1 BD: 47 +...: _xmit_LOOPBACK FD: 25 BD: 53 .+.+: netpoll_srcu ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 12 BD: 58 +.-.: &in_dev->mc_tomb_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 19 BD: 54 +.-.: &im->lock ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->&n->list_lock ->&____s->seqcount#2 ->init_task.mems_allowed_seq.seqcount FD: 1 BD: 53 +.+.: cbs_list_lock FD: 10 BD: 51 +...: &net->ipv6.addrconf_hash_lock ->&obj_hash[i].lock FD: 31 BD: 82 +...: &ifa->lock ->batched_entropy_u32.lock ->crngs.lock ->&obj_hash[i].lock ->&base->lock FD: 41 BD: 83 +...: &tb->tb6_lock ->&net->ipv6.fib6_walker_lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->nl_table_lock ->&obj_hash[i].lock ->nl_table_wait.lock ->rlock-AF_NETLINK ->rt6_exception_lock ->&data->fib_event_queue_lock ->&n->list_lock ->quarantine_lock ->&____s->seqcount#2 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&base->lock FD: 1 BD: 84 ++..: &net->ipv6.fib6_walker_lock FD: 210 BD: 2 +.+.: sk_lock-AF_INET6 ->slock-AF_INET6 ->&table->hash[i].lock ->batched_entropy_u32.lock ->&____s->seqcount ->&c->lock 
->pool_lock#2 ->&obj_hash[i].lock ->batched_entropy_u16.lock ->&tcp_hashinfo.bhash[i].lock ->&h->lhash2[i].lock ->fs_reclaim ->&mm->mmap_lock ->once_lock ->clock-AF_INET6 ->&rq->__lock ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock FD: 19 BD: 4 +...: slock-AF_INET6 ->&obj_hash[i].lock ->&tcp_hashinfo.bhash[i].lock ->pool_lock#2 ->key#24 FD: 1 BD: 72 ++..: clock-AF_INET6 FD: 1 BD: 1 +.+.: userns_state_mutex FD: 2 BD: 1 +.+.: sk_lock-AF_UNIX ->slock-AF_UNIX FD: 1 BD: 2 +...: slock-AF_UNIX FD: 1 BD: 8 +.+.: vmap_purge_lock.wait_lock FD: 52 BD: 1 .+.+: kn->active#41 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 165 BD: 47 ++++: dev_addr_sem ->net_rwsem ->&tn->lock ->&sdata->sec_mtx ->fs_reclaim ->&c->lock ->pool_lock#2 ->nl_table_lock ->rlock-AF_NETLINK ->nl_table_wait.lock ->&tbl->lock ->&pn->hash_lock ->&obj_hash[i].lock ->input_pool.lock ->&____s->seqcount ->&n->list_lock ->&rq->__lock ->&br->lock ->_xmit_ETHER ->rcu_node_0 ->&hard_iface->bat_iv.ogm_buff_mutex ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->quarantine_lock ->remove_cache_srcu ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->&rcu_state.expedited_wq ->team->team_lock_key#72 ->team->team_lock_key#76 ->team->team_lock_key#82 ->team->team_lock_key#83 ->team->team_lock_key#84 FD: 712 BD: 3 +.+.: nlk_cb_mutex-GENERIC ->fs_reclaim ->pool_lock#2 ->&____s->seqcount ->rtnl_mutex ->&rdev->wiphy.mtx ->rlock-AF_NETLINK ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount#2 ->&n->list_lock ->&devlink->lock_key#72 ->&devlink->lock_key#76 ->&devlink->lock_key#82 ->&devlink->lock_key#83 ->&devlink->lock_key#84 FD: 19 BD: 51 +...: &rdev->bss_lock ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->krc.lock ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&base->lock FD: 78 BD: 1 +.-.: (&net->sctp.addr_wq_timer) ->&net->sctp.addr_wq_lock FD: 1 BD: 48 +.+.: napi_hash_lock FD: 13 BD: 47 ++..: lapb_list_lock ->pool_lock#2 ->&obj_hash[i].lock ->&base->lock ->&c->lock ->&n->list_lock FD: 4 BD: 47 ++.-: x25_neigh_list_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 42 BD: 2 +.+.: (work_completion)(&aux->work) ->vmap_area_lock ->&obj_hash[i].lock ->purge_vmap_area_lock ->vmap_purge_lock ->pool_lock#2 ->pcpu_lock ->&rq->__lock ->stock_lock ->quarantine_lock FD: 1 BD: 47 +...: _xmit_SLIP FD: 14 BD: 1 +.-.: (&eql->timer) ->&eql->queue.lock ->&obj_hash[i].lock ->&base->lock FD: 4 BD: 50 +.-.: &eql->queue.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 51 BD: 89 +.+.: xps_map_mutex ->fs_reclaim ->pool_lock#2 ->jump_label_mutex ->&rq->__lock FD: 25 BD: 51 +.+.: &data->mutex ->&rq->__lock FD: 17 BD: 68 +...: &local->filter_lock ->pool_lock#2 ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock FD: 30 BD: 69 ..-.: &rdev->wiphy_work_lock FD: 369 BD: 7 +.+.: (work_completion)(&rdev->wiphy_work) ->&rdev->wiphy.mtx ->&lock->wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 47 +...: _xmit_VOID FD: 1 BD: 47 +...: _xmit_X25 FD: 4 BD: 48 +...: &lapbeth->up_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 60 BD: 48 +.-.: &lapb->lock ->&c->lock ->&____s->seqcount ->pool_lock#2 ->&obj_hash[i].lock ->&base->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&n->list_lock ->&list->lock#20 FD: 73 BD: 47 +...: 
dev->qdisc_tx_busylock ?: &qdisc_tx_busylock ->_xmit_ETHER#2 ->&obj_hash[i].lock ->pool_lock#2 ->&____s->seqcount ->&sch->q.lock ->&r->producer_lock#3 ->_xmit_SLIP#2 ->_xmit_NETROM ->&base->lock ->quarantine_lock ->&meta->lock ->kfence_freelist_lock FD: 57 BD: 73 +.-.: _xmit_ETHER#2 ->&obj_hash[i].lock ->pool_lock#2 ->&meta->lock ->kfence_freelist_lock ->quarantine_lock FD: 2 BD: 110 +.+.: &(ei->i_block_reservation_lock) ->key#15 FD: 32 BD: 2 +.+.: (work_completion)(&work->work) ->devices_rwsem ->&obj_hash[i].lock ->pool_lock#2 ->quarantine_lock ->&rq->__lock ->&meta->lock ->kfence_freelist_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->&base->lock FD: 702 BD: 2 +.+.: (work_completion)(&(&ifa->dad_work)->work) ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 FD: 1 BD: 84 +.-.: rt6_exception_lock FD: 1 BD: 135 +.-.: &ul->lock FD: 1 BD: 9 +.+.: fasync_lock FD: 1 BD: 102 ....: &tty->ctrl.lock FD: 1 BD: 6 +.+.: &buf->lock FD: 1 BD: 10 ....: &tty->flow.lock FD: 25 BD: 2 +.+.: &net->packet.sklist_lock ->&rq->__lock FD: 185 BD: 2 +.+.: sk_lock-AF_PACKET ->slock-AF_PACKET ->&po->bind_lock ->&obj_hash[i].lock ->&x->wait#3 ->&rq->__lock ->&mm->mmap_lock ->fs_reclaim ->&c->lock ->pool_lock#2 ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->pcpu_alloc_mutex ->batched_entropy_u32.lock ->vmap_purge_lock ->&fp->aux->used_maps_mutex ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock ->&rnp->exp_lock ->rcu_state.exp_mutex ->rcu_state.exp_mutex.wait_lock ->&p->pi_lock ->stock_lock FD: 1 BD: 3 +...: slock-AF_PACKET FD: 31 BD: 1 +.-.: &tx->clean_lock ->&obj_hash[i].lock ->pool_lock#2 ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->&base->lock FD: 18 BD: 3 +.+.: &po->bind_lock ->ptype_lock ->pool_lock#2 ->&dir->lock#2 FD: 164 BD: 6 +.+.: &ldata->atomic_read_lock ->&tty->termios_rwsem ->(work_completion)(&buf->work) ->&rq->__lock FD: 30 BD: 1 ..-.: &(&idev->mc_dad_work)->timer FD: 136 BD: 1 +.+.: (wq_completion)mld ->(work_completion)(&(&idev->mc_dad_work)->work) ->(work_completion)(&(&idev->mc_ifc_work)->work) FD: 134 BD: 2 +.+.: (work_completion)(&(&idev->mc_dad_work)->work) ->&idev->mc_lock ->&rq->__lock FD: 1 BD: 1 +.-.: rlock-AF_PACKET FD: 1 BD: 1 +...: wlock-AF_PACKET FD: 1 BD: 52 ....: class FD: 1 BD: 52 ....: (&tbl->proxy_timer) FD: 30 BD: 1 ..-.: &(&idev->mc_ifc_work)->timer FD: 134 BD: 2 +.+.: (work_completion)(&(&idev->mc_ifc_work)->work) ->&idev->mc_lock ->&lock->wait_lock ->&p->pi_lock ->&rq->__lock FD: 17 BD: 56 +.-.: &ul->lock#2 ->pool_lock#2 ->&dir->lock#2 ->&c->lock ->&n->list_lock FD: 15 BD: 117 ++--: &n->lock ->&____s->seqcount#8 ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->&(&n->ha_lock)->lock FD: 1 BD: 119 +.--: &____s->seqcount#8 FD: 1 BD: 128 ...-: &____s->seqcount#9 FD: 30 BD: 1 ..-.: net/core/link_watch.c:31 FD: 80 BD: 48 +.-.: &dev->tx_global_lock ->_xmit_ETHER#2 ->&obj_hash[i].lock ->&base->lock ->_xmit_NETROM ->_xmit_NONE#2 ->_xmit_TUNNEL6#2 ->_xmit_SIT#2 ->_xmit_TUNNEL#2 ->_xmit_IPGRE#2 ->&qdisc_xmit_lock_key ->&qdisc_xmit_lock_key#2 ->&vlan_netdev_xmit_lock_key ->&batadv_netdev_xmit_lock_key ->&qdisc_xmit_lock_key#3 ->&qdisc_xmit_lock_key#4 ->_xmit_LOOPBACK#2 FD: 1 BD: 50 +.-.: &sch->q.lock FD: 1 BD: 116 +.-.: lock#8 FD: 1 BD: 116 ..-.: id_table_lock FD: 34 BD: 2 +.+.: (work_completion)(&w->work)#2 ->pool_lock#2 ->&dir->lock ->&obj_hash[i].lock ->nf_conntrack_mutex ->&rq->__lock FD: 1 BD: 2 +.+.: fanout_mutex FD: 1 BD: 2 +...: 
clock-AF_PACKET FD: 15 BD: 1 +.-.: (&dom->period_timer) ->key#13 ->&p->sequence ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 286 -.-.: key#13 FD: 30 BD: 1 ..-.: &(&ifa->dad_work)->timer FD: 1 BD: 64 +.-.: &ct->lock FD: 30 BD: 1 ..-.: drivers/base/dd.c:321 FD: 38 BD: 2 +.+.: (deferred_probe_timeout_work).work ->device_links_lock ->deferred_probe_mutex ->&rq->__lock ->deferred_probe_work FD: 2 BD: 284 ....: &pl->lock ->key#12 FD: 1 BD: 118 ....: key#14 FD: 61 BD: 1 +.-.: (&lapb->t1timer) ->&lapb->lock FD: 81 BD: 1 +.-.: (&dev->watchdog_timer) ->&dev->tx_global_lock FD: 75 BD: 110 +.+.: &lg->lg_mutex ->&ei->i_prealloc_lock ->mmu_notifier_invalidate_range_start ->&____s->seqcount ->&c->lock ->pool_lock#2 ->lock#4 ->&mapping->i_private_lock ->&ret->b_state_lock ->&journal->j_revoke_lock ->&pa->pa_lock ->&lg->lg_prealloc_lock FD: 1 BD: 111 +.+.: &pa->pa_lock FD: 1 BD: 111 +.+.: &lg->lg_prealloc_lock FD: 30 BD: 3 -.-.: &ei->i_completed_io_lock FD: 135 BD: 1 +.+.: (wq_completion)ext4-rsv-conversion ->(work_completion)(&ei->i_rsv_conversion_work) FD: 134 BD: 2 +.+.: (work_completion)(&ei->i_rsv_conversion_work) ->&ei->i_completed_io_lock ->&journal->j_state_lock ->jbd2_handle ->&obj_hash[i].lock ->pool_lock#2 ->&ext4__ioend_wq[i] ->&ret->b_uptodate_lock ->&folio_wait_table[i] ->&rq->__lock ->rcu_node_0 FD: 1 BD: 111 ....: &journal->j_wait_reserved FD: 1 BD: 3 ....: &ext4__ioend_wq[i] FD: 5 BD: 113 +.-.: &nf_conntrack_locks[i] ->&nf_conntrack_locks[i]/1 ->batched_entropy_u8.lock FD: 4 BD: 114 +.-.: &nf_conntrack_locks[i]/1 ->batched_entropy_u8.lock FD: 1 BD: 69 +.-.: &hashinfo->ehash_locks[i] FD: 2 BD: 118 +.-.: &(&n->ha_lock)->lock ->&____s->seqcount#8 FD: 1 BD: 53 ..-.: (&req->rsk_timer) FD: 1 BD: 53 +.-.: &icsk->icsk_accept_queue.rskq_lock FD: 1 BD: 48 +.-.: &sd->defer_lock FD: 77 BD: 1 +.-.: (&icsk->icsk_delack_timer) ->slock-AF_INET#2 FD: 77 BD: 1 +.-.: (&icsk->icsk_retransmit_timer) ->slock-AF_INET#2 FD: 7 BD: 52 +.-.: tcp_metrics_lock ->&c->lock ->pool_lock#2 FD: 1 BD: 111 ....: key#15 FD: 71 BD: 109 +.+.: &sbi->s_orphan_lock ->rcu_node_0 ->&rq->__lock ->mmu_notifier_invalidate_range_start ->&ei->i_raw_lock ->&lock->wait_lock ->&mapping->i_private_lock ->&ret->b_state_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->&cfs_rq->removed.lock ->stock_lock ->&rcu_state.expedited_wq FD: 52 BD: 1 .+.+: kn->active#42 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 1 BD: 1 +.+.: &futex_queues[i].lock FD: 1 BD: 4 ....: &on->poll FD: 1 BD: 3 +.+.: module_mutex FD: 3 BD: 48 +.+.: once_mutex ->crngs.lock FD: 30 BD: 1 ..-.: &(&dm_bufio_cleanup_old_work)->timer FD: 15 BD: 1 +.+.: (wq_completion)dm_bufio_cache ->(work_completion)(&(&dm_bufio_cleanup_old_work)->work) FD: 14 BD: 2 +.+.: (work_completion)(&(&dm_bufio_cleanup_old_work)->work) ->dm_bufio_clients_lock ->&obj_hash[i].lock ->&base->lock FD: 30 BD: 1 ..-.: drivers/regulator/core.c:6335 FD: 4 BD: 2 +.+.: (regulator_init_complete_work).work ->&k->list_lock ->&k->k_lock FD: 30 BD: 1 ..-.: &(&tbl->gc_work)->timer FD: 45 BD: 2 +.+.: (work_completion)(&(&tbl->gc_work)->work) ->&tbl->lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 97 BD: 1 +.+.: &type->s_umount_key#41/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&root->kernfs_rwsem ->&c->lock ->&n->list_lock ->&sb->s_type->i_lock_key#30 ->crngs.lock ->&root->kernfs_supers_rwsem ->&dentry->d_lock FD: 37 BD: 247 +.+.: &sb->s_type->i_lock_key#30 
->&dentry->d_lock FD: 733 BD: 1 .+.+: sb_writers#8 ->mount_lock ->&type->i_mutex_dir_key#6 ->fs_reclaim ->&mm->mmap_lock ->&of->mutex ->&obj_hash[i].lock ->&type->i_mutex_dir_key#6/1 ->&c->lock ->&n->list_lock FD: 54 BD: 2 ++++: &type->i_mutex_dir_key#6 ->tomoyo_ss ->tk_core.seq.seqcount ->&root->kernfs_iattr_rwsem ->rename_lock.seqcount ->fs_reclaim ->&dentry->d_lock ->&root->kernfs_rwsem ->&sb->s_type->i_lock_key#30 ->&c->lock FD: 52 BD: 1 ++++: kn->active#43 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->pool_lock#2 FD: 97 BD: 1 +.+.: &type->s_umount_key#42/1 ->fs_reclaim ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->&root->kernfs_rwsem ->&sb->s_type->i_lock_key#31 ->crngs.lock ->&root->kernfs_supers_rwsem ->&dentry->d_lock ->&c->lock FD: 37 BD: 247 +.+.: &sb->s_type->i_lock_key#31 ->&dentry->d_lock FD: 71 BD: 1 +.+.: &type->s_umount_key#43 ->&x->wait#23 ->shrinker_mutex ->&obj_hash[i].lock ->percpu_ref_switch_lock ->&root->kernfs_supers_rwsem ->rename_lock.seqcount ->&dentry->d_lock ->&sb->s_type->i_lock_key#31 ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->inode_hash_lock ->pool_lock#2 ->&fsnotify_mark_srcu ->&dentry->d_lock/1 FD: 1 BD: 1 ..-.: percpu_ref_switch_waitq.lock FD: 711 BD: 2 +.+.: (work_completion)(&cgrp->bpf.release_work) ->cgroup_mutex ->cgroup_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->percpu_ref_switch_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 16 +.+.: cgroup_mutex.wait_lock FD: 716 BD: 1 +.+.: (wq_completion)cgroup_destroy ->(work_completion)(&css->destroy_work) ->(work_completion)(&(&css->destroy_rwork)->work) FD: 711 BD: 2 +.+.: (work_completion)(&css->destroy_work) ->cgroup_mutex ->&obj_hash[i].lock ->pool_lock#2 FD: 714 BD: 2 +.+.: (work_completion)(&(&css->destroy_rwork)->work) ->percpu_ref_switch_lock ->&obj_hash[i].lock ->pool_lock#2 ->&cgrp->pidlist_mutex ->(wq_completion)cgroup_pidlist_destroy ->&wq->mutex ->(work_completion)(&cgrp->release_agent_work) ->cgroup_mutex ->cgroup_rstat_lock ->pcpu_lock ->&root->kernfs_rwsem ->kernfs_idr_lock ->krc.lock ->cgroup_mutex.wait_lock ->&p->pi_lock FD: 1 BD: 3 +.+.: &cgrp->pidlist_mutex FD: 1 BD: 3 +.+.: (wq_completion)cgroup_pidlist_destroy FD: 1 BD: 3 +.+.: (work_completion)(&cgrp->release_agent_work) FD: 733 BD: 1 .+.+: sb_writers#9 ->mount_lock ->&type->i_mutex_dir_key#7 ->fs_reclaim ->&mm->mmap_lock ->&of->mutex ->&obj_hash[i].lock ->&type->i_mutex_dir_key#7/1 ->&c->lock ->&n->list_lock ->&rq->__lock ->remove_cache_srcu FD: 60 BD: 2 ++++: &type->i_mutex_dir_key#7 ->tomoyo_ss ->tk_core.seq.seqcount ->&root->kernfs_iattr_rwsem ->rename_lock.seqcount ->fs_reclaim ->&dentry->d_lock ->&root->kernfs_rwsem ->&sb->s_type->i_lock_key#31 ->pool_lock#2 ->&xa->xa_lock#5 ->&obj_hash[i].lock ->stock_lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount FD: 1 BD: 16 +.+.: &dom->lock FD: 52 BD: 1 .+.+: kn->active#44 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] FD: 227 BD: 1 .+.+: kn->active#45 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->cpu_hotplug_lock FD: 166 BD: 1 .+.+: sb_writers#10 ->&mm->mmap_lock ->&attr->mutex FD: 165 BD: 2 +.+.: &attr->mutex ->&mm->mmap_lock FD: 39 BD: 1 +.+.: &type->s_umount_key#44 ->sb_lock ->&dentry->d_lock FD: 79 BD: 2 +.+.: &sb->s_type->i_mutex_key#16 ->namespace_sem ->rename_lock.seqcount ->fs_reclaim ->&c->lock ->pool_lock#2 ->&dentry->d_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#26 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->init_binfmt_misc.entries_lock FD: 173 BD: 1 .+.+: sb_writers#11 ->fs_reclaim 
->pool_lock#2 ->&mm->mmap_lock ->&sb->s_type->i_mutex_key#16 FD: 1 BD: 47 +.+.: &wpan_dev->association_lock FD: 1 BD: 49 +...: &pn->hash_lock FD: 42 BD: 1 +...: &net->ipv6.fib6_gc_lock ->&obj_hash[i].lock FD: 1 BD: 47 +...: _xmit_IEEE802154 FD: 1 BD: 3 +.+.: &xa->xa_lock#16 FD: 1 BD: 6 ....: genl_sk_destructing_waitq.lock FD: 1 BD: 3 +...: &rdev->beacon_registrations_lock FD: 1 BD: 52 +...: &rdev->mgmt_registrations_lock FD: 1 BD: 52 +...: &wdev->pmsr_lock FD: 1 BD: 48 +.+.: reg_indoor_lock FD: 139 BD: 2 +.+.: (work_completion)(&w->w) ->nfc_devlist_mutex ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock ->&meta->lock ->kfence_freelist_lock ->&base->lock ->nfc_devlist_mutex.wait_lock ->&p->pi_lock ->quarantine_lock FD: 1 BD: 5 +.+.: &genl_data->genl_data_mutex FD: 49 BD: 6 +.+.: swap_cgroup_mutex ->fs_reclaim ->&____s->seqcount FD: 1 BD: 6 +.+.: &((cluster_info + ci)->lock)/1 FD: 67 BD: 6 +.+.: swapon_mutex ->fs_reclaim ->pool_lock#2 ->swap_lock ->percpu_ref_switch_lock ->(console_sem).lock FD: 2 BD: 9 +.+.: &p->lock#2 ->swap_avail_lock FD: 1 BD: 10 +.+.: swap_avail_lock FD: 1 BD: 6 ....: proc_poll_wait.lock FD: 229 BD: 1 +.+.: swap_slots_cache_enable_mutex ->cpu_hotplug_lock ->swap_lock FD: 1 BD: 91 +.+.: swap_slots_cache_mutex FD: 1 BD: 148 ....: &newf->resize_wait FD: 11 BD: 123 ..-.: &kcov->lock ->kcov_remote_lock FD: 104 BD: 1 +.+.: pid_caches_mutex ->slab_mutex FD: 39 BD: 1 +.+.: &type->s_umount_key#45 ->sb_lock ->&dentry->d_lock ->&rq->__lock FD: 170 BD: 2 ++++: &sb->s_type->i_mutex_key#17 ->namespace_sem ->&mm->mmap_lock ->vmap_area_lock ->&dentry->d_lock ->tk_core.seq.seqcount FD: 1 BD: 26 ++++: hci_sk_list.lock FD: 1 BD: 1 +.+.: (work_completion)(&(&data->open_timeout)->work) FD: 256 BD: 1 +.+.: &data->open_mutex ->fs_reclaim ->pool_lock#2 ->&____s->seqcount ->&obj_hash[i].lock ->&x->wait#9 ->hci_index_ida.xa_lock ->&c->lock ->&n->list_lock ->pcpu_alloc_mutex ->cpu_hotplug_lock ->wq_pool_mutex ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->&k->list_lock ->gdp_mutex ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->subsys mutex#74 ->&dev->devres_lock ->triggers_list_lock ->leds_list_lock ->rfkill_global_mutex ->&rfkill->lock ->hci_dev_list_lock ->tk_core.seq.seqcount ->hci_sk_list.lock ->(pm_chain_head).rwsem ->&list->lock#5 ->&data->read_wait ->rfkill_global_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->uevent_sock_mutex.wait_lock ->&____s->seqcount#2 ->&sem->wait_lock ->remove_cache_srcu ->&cfs_rq->removed.lock ->pcpu_alloc_mutex.wait_lock ->rcu_node_0 FD: 1 BD: 2 ....: hci_index_ida.xa_lock FD: 3 BD: 23 +.+.: subsys mutex#74 ->&k->k_lock FD: 1 BD: 14 ++++: hci_dev_list_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1033 FD: 170 BD: 9 +.+.: (work_completion)(&hdev->power_on) ->&hdev->req_lock ->fs_reclaim ->&c->lock ->pool_lock#2 ->tk_core.seq.seqcount ->hci_sk_list.lock ->&obj_hash[i].lock ->&____s->seqcount ->&n->list_lock ->&____s->seqcount#2 FD: 1 BD: 23 ....: &list->lock#5 FD: 169 BD: 11 +.+.: &hdev->req_lock ->&obj_hash[i].lock ->pool_lock#2 ->&list->lock#6 ->&list->lock#7 ->&hdev->req_wait_q ->&base->lock ->&rq->__lock ->(&timer.timer) ->&c->lock ->&____s->seqcount ->tk_core.seq.seqcount ->hci_sk_list.lock ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->(work_completion)(&(&hdev->interleave_scan)->work) ->hci_dev_list_lock ->(work_completion)(&hdev->tx_work) ->(work_completion)(&hdev->rx_work) ->&wq->mutex ->&hdev->lock ->&list->lock#5 ->(work_completion)(&hdev->cmd_work) 
->(work_completion)(&(&hdev->cmd_timer)->work) ->&n->list_lock ->pool_lock ->&____s->seqcount#2 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->(wq_completion)hci0#10 ->(wq_completion)hci5#4 ->(wq_completion)hci1#2 FD: 26 BD: 23 ....: &data->read_wait ->&p->pi_lock FD: 1 BD: 12 ....: &list->lock#6 FD: 1 BD: 24 ....: &list->lock#7 FD: 26 BD: 21 ....: &hdev->req_wait_q ->&p->pi_lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#287 ->&rq->__lock FD: 50 BD: 20 +.+.: (work_completion)(&hdev->cmd_work) ->&list->lock#7 ->fs_reclaim ->pool_lock#2 ->tk_core.seq.seqcount ->&list->lock#5 ->&data->read_wait ->&obj_hash[i].lock ->&base->lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->remove_cache_srcu ->&rq->__lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&____s->seqcount#2 ->rcu_node_0 FD: 50 BD: 2 +.+.: sk_lock-AF_BLUETOOTH-BTPROTO_HCI ->slock-AF_BLUETOOTH-BTPROTO_HCI ->sock_cookie_ida.xa_lock ->&p->alloc_lock ->pool_lock#2 ->tk_core.seq.seqcount ->hci_sk_list.lock ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->clock-AF_BLUETOOTH ->&rq->__lock ->&n->list_lock ->&____s->seqcount#2 FD: 1 BD: 3 +...: slock-AF_BLUETOOTH-BTPROTO_HCI FD: 1 BD: 3 ....: sock_cookie_ida.xa_lock FD: 153 BD: 20 +.+.: (work_completion)(&hdev->rx_work) ->&list->lock#7 ->lock#6 ->fs_reclaim ->pool_lock#2 ->&c->lock ->free_vmap_area_lock ->vmap_area_lock ->&____s->seqcount ->init_mm.page_table_lock ->&hdev->lock ->(console_sem).lock ->console_owner_lock ->console_owner ->&rq->__lock ->&obj_hash[i].lock ->&hdev->req_wait_q ->&base->lock ->&meta->lock ->kfence_freelist_lock ->chan_list_lock ->&n->list_lock ->rcu_node_0 ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->remove_cache_srcu ->batched_entropy_u8.lock ->&rcu_state.expedited_wq ->quarantine_lock FD: 140 BD: 21 +.+.: &hdev->lock ->&xa->xa_lock#17 ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&x->wait#9 ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->sysfs_symlink_target_lock ->&c->lock ->&n->list_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->&k->k_lock ->subsys mutex#74 ->&list->lock#7 ->&hdev->unregister_lock ->hci_cb_list_lock ->&base->lock ->tk_core.seq.seqcount ->hci_sk_list.lock ->&____s->seqcount ->(work_completion)(&(&conn->disc_work)->work) ->(work_completion)(&(&conn->auto_accept_work)->work) ->(work_completion)(&(&conn->idle_work)->work) ->&x->wait#3 ->&rq->__lock ->dev_pm_qos_sysfs_mtx ->kernfs_idr_lock ->deferred_probe_mutex ->device_links_lock ->mmu_notifier_invalidate_range_start ->&sem->wait_lock ->&p->pi_lock ->&____s->seqcount#2 ->quarantine_lock ->remove_cache_srcu ->hci_cb_list_lock.wait_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->pool_lock ->uevent_sock_mutex.wait_lock ->&cfs_rq->removed.lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#15 FD: 3 BD: 5 +.+.: rdma_nets_rwsem ->rdma_nets.xa_lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#337 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1006 ->&rq->__lock FD: 10 BD: 22 ....: &xa->xa_lock#17 ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&n->list_lock FD: 52 BD: 23 +.+.: &hdev->unregister_lock ->fs_reclaim ->pool_lock#2 ->&hdev->cmd_sync_work_lock ->&rq->__lock ->&c->lock ->&n->list_lock ->remove_cache_srcu FD: 1 BD: 24 +.+.: &hdev->cmd_sync_work_lock FD: 170 BD: 9 +.+.: (work_completion)(&hdev->cmd_sync_work) ->&hdev->cmd_sync_work_lock ->&hdev->req_lock ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock FD: 25 BD: 23 +.+.: 
&conn->ident_lock ->&rq->__lock FD: 1 BD: 24 ....: &list->lock#8 FD: 1 BD: 25 +.+.: &conn->chan_lock FD: 30 BD: 20 +.+.: (work_completion)(&hdev->tx_work) ->&list->lock#8 ->tk_core.seq.seqcount ->&list->lock#5 ->&data->read_wait ->&list->lock#7 ->&rq->__lock FD: 2 BD: 20 +.+.: (work_completion)(&conn->pending_rx_work) ->&list->lock#9 FD: 1 BD: 21 ....: &list->lock#9 FD: 1 BD: 3 +...: clock-AF_BLUETOOTH FD: 1 BD: 1 +.+.: &undo_list->lock FD: 1 BD: 47 +...: &nr_netdev_addr_lock_key FD: 1 BD: 47 +...: listen_lock FD: 2 BD: 6 +.+.: rdma_nets.xa_lock ->pool_lock#2 FD: 1 BD: 163 +.+.: uevent_sock_mutex.wait_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_rx_wq#2 FD: 1 BD: 4 +.+.: &____s->seqcount#10 FD: 2 BD: 3 +.+.: &(&net->ipv4.ping_group_range.lock)->lock ->&____s->seqcount#10 FD: 2 BD: 47 +.+.: &r->consumer_lock ->&r->producer_lock FD: 1 BD: 57 +...: &r->producer_lock FD: 8 BD: 55 +...: &bridge_netdev_addr_lock_key ->pool_lock#2 ->&c->lock ->&n->list_lock FD: 36 BD: 50 +.-.: &br->hash_lock ->&____s->seqcount ->&c->lock ->pool_lock#2 ->nl_table_lock ->&obj_hash[i].lock ->nl_table_wait.lock ->&n->list_lock ->&____s->seqcount#2 ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&base->lock FD: 1 BD: 47 +.+.: j1939_netdev_lock FD: 8 BD: 55 +...: &dev_addr_list_lock_key#2 ->pool_lock#2 ->&c->lock ->&n->list_lock FD: 9 BD: 47 +...: &bat_priv->tvlv.handler_list_lock ->pool_lock#2 ->&____s->seqcount ->&c->lock ->&n->list_lock FD: 14 BD: 54 +...: &bat_priv->tvlv.container_list_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->&____s->seqcount#2 ->quarantine_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 8 BD: 55 +...: &batadv_netdev_addr_lock_key ->&c->lock ->pool_lock#2 ->&n->list_lock FD: 10 BD: 56 +...: &bat_priv->softif_vlan_list_lock ->pool_lock#2 ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 18 BD: 55 +...: key#16 ->&bat_priv->softif_vlan_list_lock ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock FD: 4 BD: 54 +...: &bat_priv->tt.changes_list_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 30 BD: 1 ..-.: &(&bat_priv->nc.work)->timer FD: 61 BD: 1 +.+.: (wq_completion)bat_events ->(work_completion)(&(&bat_priv->nc.work)->work) ->(work_completion)(&(&bat_priv->mcast.work)->work) ->(work_completion)(&(&bat_priv->orig_work)->work) ->(work_completion)(&(&forw_packet_aggr->delayed_work)->work) ->(work_completion)(&(&bat_priv->tt.work)->work) ->&rq->__lock ->(work_completion)(&(&bat_priv->dat.work)->work) ->(work_completion)(&(&bat_priv->bla.work)->work) ->(work_completion)(&barr->work) ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 31 BD: 6 +.+.: (work_completion)(&(&bat_priv->nc.work)->work) ->key#17 ->key#18 ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq ->pool_lock#2 ->&cfs_rq->removed.lock FD: 1 BD: 7 +...: key#17 FD: 1 BD: 7 +...: key#18 FD: 104 BD: 48 +.+.: init_lock ->slab_mutex ->fs_reclaim ->&zone->lock ->&____s->seqcount ->&obj_hash[i].lock ->&base->lock ->crngs.lock FD: 1 BD: 67 +.-.: deferred_lock FD: 702 BD: 2 +.+.: deferred_process_work ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock FD: 50 BD: 49 +.-.: &br->lock ->&br->hash_lock ->lweventlist_lock ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->&dir->lock#2 ->deferred_lock ->(console_sem).lock ->&c->lock ->&____s->seqcount ->nl_table_lock ->nl_table_wait.lock ->&br->multicast_lock ->&n->list_lock 
->batched_entropy_u8.lock ->kfence_freelist_lock ->quarantine_lock ->&meta->lock ->&____s->seqcount#2 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#332 ->&rq->__lock FD: 35 BD: 56 +.+.: (work_completion)(&(&slave->notify_work)->work) ->&obj_hash[i].lock ->&base->lock ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock FD: 1 BD: 48 +.+.: &bond->stats_lock/1 FD: 30 BD: 1 ..-.: &(&slave->notify_work)->timer FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#488 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1174 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1277 FD: 40 BD: 50 +.+.: &hard_iface->bat_iv.ogm_buff_mutex ->crngs.lock ->pool_lock#2 ->batched_entropy_u8.lock ->&bat_priv->forw_bat_list_lock ->&____s->seqcount ->&obj_hash[i].lock ->&c->lock ->rcu_node_0 ->&rq->__lock ->&bat_priv->tt.commit_lock ->&bat_priv->tvlv.container_list_lock ->kfence_freelist_lock ->&____s->seqcount#2 ->&rcu_state.expedited_wq ->&n->list_lock ->&cfs_rq->removed.lock ->&lock->wait_lock FD: 13 BD: 51 +...: &bat_priv->forw_bat_list_lock ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 47 +...: _xmit_NONE FD: 1 BD: 47 +...: lock#9 FD: 59 BD: 2 +.+.: (work_completion)(&fqdir->destroy_work) ->(work_completion)(&ht->run_work) ->&ht->mutex FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#771 ->&rq->__lock FD: 1 BD: 48 ...-: &____s->seqcount#11 FD: 30 BD: 1 ..-.: &(&bat_priv->mcast.work)->timer FD: 37 BD: 6 +.+.: (work_completion)(&(&bat_priv->mcast.work)->work) ->pool_lock#2 ->&bat_priv->mcast.mla_lock ->&obj_hash[i].lock ->&base->lock ->kfence_freelist_lock ->&meta->lock ->rcu_node_0 ->&rq->__lock ->&cfs_rq->removed.lock ->quarantine_lock FD: 23 BD: 7 +.+.: &bat_priv->mcast.mla_lock ->pool_lock#2 ->key#16 ->&bat_priv->tt.changes_list_lock ->&bat_priv->tvlv.container_list_lock ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->&meta->lock ->kfence_freelist_lock ->&n->list_lock FD: 1 BD: 59 +.-.: &hsr->list_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1045 FD: 1 BD: 5 +.+.: (wq_completion)tipc_crypto#9 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1025 ->&rq->__lock FD: 8 BD: 55 +...: &vlan_netdev_addr_lock_key ->pool_lock#2 ->&c->lock ->&n->list_lock FD: 10 BD: 55 +...: &macvlan_netdev_addr_lock_key ->&____s->seqcount ->pool_lock#2 ->&c->lock ->&____s->seqcount#2 ->&n->list_lock FD: 16 BD: 47 +.-.: (&app->join_timer) ->&app->lock ->&list->lock#10 FD: 14 BD: 49 +.-.: &app->lock ->batched_entropy_u32.lock ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 48 ..-.: &list->lock#10 FD: 8 BD: 55 +...: &dev_addr_list_lock_key#3 ->pool_lock#2 ->&c->lock ->&n->list_lock FD: 1 BD: 47 ....: &xa->xa_lock#18 FD: 30 BD: 1 ..-.: &(&bat_priv->orig_work)->timer FD: 29 BD: 6 +.+.: (work_completion)(&(&bat_priv->orig_work)->work) ->key#19 ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->pool_lock#2 FD: 30 BD: 1 ..-.: drivers/net/wireguard/ratelimiter.c:20 FD: 1 BD: 49 +...: key#19 FD: 29 BD: 2 +.+.: (gc_work).work ->tk_core.seq.seqcount ->"ratelimiter_table_lock" ->&rq->__lock ->&obj_hash[i].lock ->&base->lock ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->pool_lock#2 FD: 1 BD: 3 +.+.: "ratelimiter_table_lock" FD: 16 BD: 47 +.-.: (&app->join_timer)#2 ->&app->lock#2 ->&list->lock#11 ->batched_entropy_u32.lock ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 48 +.-.: &app->lock#2 FD: 1 BD: 48 ..-.: &list->lock#11 FD: 17 BD: 55 +...: &dev_addr_list_lock_key#3/1 
->&obj_hash[i].lock ->pool_lock#2 ->krc.lock ->&c->lock ->&n->list_lock FD: 2 BD: 47 +.+.: &tap_major->minor_lock ->pool_lock#2 FD: 3 BD: 47 +.+.: subsys mutex#75 ->&k->k_lock FD: 722 BD: 1 .+.+: kn->active#46 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->nsim_bus_dev_list_lock ->&c->lock ->nsim_bus_dev_list_lock.wait_lock ->&p->pi_lock ->&rq->__lock ->remove_cache_srcu ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 721 BD: 9 +.+.: nsim_bus_dev_list_lock ->fs_reclaim ->pool_lock#2 ->nsim_bus_dev_ids.xa_lock ->&x->wait#9 ->&obj_hash[i].lock ->&k->list_lock ->lock ->&root->kernfs_rwsem ->bus_type_sem ->&rq->__lock ->&c->lock ->&____s->seqcount ->sysfs_symlink_target_lock ->&k->k_lock ->&dev->power.lock ->dpm_list_mtx ->uevent_sock_mutex ->device_links_lock ->nsim_bus_dev_list_lock.wait_lock ->deferred_probe_mutex ->subsys mutex#76 ->dev_pm_qos_sysfs_mtx ->kernfs_idr_lock ->mmu_notifier_invalidate_range_start ->&n->list_lock ->&____s->seqcount#2 ->remove_cache_srcu ->uevent_sock_mutex.wait_lock ->&p->pi_lock ->&sem->wait_lock ->quarantine_lock ->&lock->wait_lock FD: 722 BD: 1 .+.+: kn->active#47 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->nsim_bus_dev_list_lock ->nsim_bus_dev_list_lock.wait_lock ->&p->pi_lock ->&c->lock ->&____s->seqcount ->&rq->__lock ->&n->list_lock ->&lock->wait_lock FD: 1 BD: 10 ....: nsim_bus_dev_ids.xa_lock FD: 4 BD: 18 +.+.: devlinks.xa_lock ->pool_lock#2 ->&obj_hash[i].lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#378 FD: 13 BD: 18 +.+.: &xa->xa_lock#19 ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock FD: 30 BD: 1 ..-.: &(&nsim_dev->trap_data->trap_report_dw)->timer FD: 32 BD: 2 +.+.: (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->rcu_node_0 FD: 1 BD: 85 +...: &data->fib_event_queue_lock FD: 1 BD: 18 ....: &(&fn_net->fib_chain)->lock FD: 58 BD: 2 +.+.: (work_completion)(&data->fib_event_work) ->&data->fib_event_queue_lock ->&data->fib_lock ->rcu_node_0 ->&rq->__lock FD: 56 BD: 3 +.+.: &data->fib_lock ->fs_reclaim ->pool_lock#2 ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->(&timer.timer) ->&c->lock ->&n->list_lock ->remove_cache_srcu ->batched_entropy_u8.lock ->kfence_freelist_lock ->rcu_node_0 ->&____s->seqcount ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->pool_lock ->&meta->lock ->quarantine_lock ->&rcu_state.expedited_wq FD: 55 BD: 51 +.+.: bpf_devs_lock ->fs_reclaim ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->&rq->__lock ->&n->list_lock FD: 30 BD: 1 ..-.: &(&forw_packet_aggr->delayed_work)->timer FD: 25 BD: 47 +.+.: (work_completion)(&(&devlink_port->type_warn_dw)->work) ->&rq->__lock FD: 1 BD: 47 +...: &devlink_port->type_lock FD: 1 BD: 47 +.+.: &vn->sock_lock FD: 42 BD: 48 +.+.: (work_completion)(&(&forw_packet_aggr->delayed_work)->work) ->&hard_iface->bat_iv.ogm_buff_mutex ->&bat_priv->forw_bat_list_lock ->&obj_hash[i].lock ->pool_lock#2 ->&c->lock ->&meta->lock ->kfence_freelist_lock ->&____s->seqcount#2 ->&____s->seqcount ->&rq->__lock ->&cfs_rq->removed.lock ->&n->list_lock ->batched_entropy_u8.lock ->quarantine_lock ->&base->lock ->rcu_node_0 ->&lock->wait_lock ->&p->pi_lock ->&rcu_state.expedited_wq FD: 1 BD: 10 +.+.: nsim_bus_dev_list_lock.wait_lock FD: 1 BD: 10 +.+.: subsys mutex#76 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#981 FD: 30 BD: 1 ..-.: &(&hwstats->traffic_dw)->timer FD: 26 BD: 2 +.+.: (work_completion)(&(&hwstats->traffic_dw)->work) ->&hwstats->hwsdev_list_lock 
->&obj_hash[i].lock ->&base->lock ->&rq->__lock FD: 25 BD: 48 +.+.: &hwstats->hwsdev_list_lock ->&rq->__lock FD: 17 BD: 20 +.+.: &nsim_trap_data->trap_lock ->pool_lock#2 ->crngs.lock ->&nsim_dev->fa_cookie_lock ->&obj_hash[i].lock ->&c->lock ->&____s->seqcount ->quarantine_lock ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock ->&____s->seqcount#2 ->&base->lock FD: 1 BD: 21 +...: &nsim_dev->fa_cookie_lock FD: 314 BD: 47 +.+.: &wg->device_update_lock ->&wg->static_identity.lock ->fs_reclaim ->&____s->seqcount ->&c->lock ->pool_lock#2 ->pcpu_alloc_mutex ->&handshake->lock ->&obj_hash[i].lock ->tk_core.seq.seqcount ->&table->lock ->&peer->endpoint_lock ->pool_lock ->&rq->__lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#8 ->&dir->lock ->k-slock-AF_INET/1 ->k-sk_lock-AF_INET ->k-slock-AF_INET#2 ->cpu_hotplug_lock ->k-sk_lock-AF_INET6 ->k-slock-AF_INET6 ->&wg->socket_update_lock ->&list->lock#14 ->&rnp->exp_wq[3] ->&____s->seqcount#2 ->&n->list_lock ->&x->wait#3 ->&cfs_rq->removed.lock ->&table->hash[i].lock ->k-clock-AF_INET ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->k-clock-AF_INET6 ->(&peer->timer_retransmit_handshake) ->&base->lock ->(&peer->timer_send_keepalive) ->(&peer->timer_new_handshake) ->(&peer->timer_zero_key_material) ->(&peer->timer_persistent_keepalive) ->(work_completion)(&peer->clear_peer_work) ->&peer->keypairs.keypair_update_lock ->&wq->mutex ->napi_hash_lock ->&table->lock#2 ->wq_pool_mutex ->wq_mayday_lock ->&p->pi_lock ->&x->wait ->pcpu_lock ->&r->consumer_lock#2 ->&zone->lock ->rcu_state.barrier_mutex ->init_lock ->rcu_state.barrier_mutex.wait_lock ->&rnp->exp_lock ->rcu_state.exp_mutex ->rcu_state.exp_mutex.wait_lock ->quarantine_lock ->remove_cache_srcu ->(wq_completion)wg-crypt-wg2#77 ->(wq_completion)wg-kex-wg2#152 ->(wq_completion)wg-kex-wg2#153 ->(wq_completion)wg-crypt-wg1#77 ->(wq_completion)wg-kex-wg1#153 ->(wq_completion)wg-kex-wg1#154 ->(wq_completion)wg-crypt-wg0#78 ->(wq_completion)wg-kex-wg0#155 ->(wq_completion)wg-kex-wg0#156 ->(wq_completion)wg-crypt-wg2#79 ->(wq_completion)wg-kex-wg2#157 ->(wq_completion)wg-kex-wg2#156 ->(wq_completion)wg-crypt-wg1#79 ->(wq_completion)wg-kex-wg1#157 ->(wq_completion)wg-kex-wg1#158 ->(wq_completion)wg-crypt-wg0#80 ->(wq_completion)wg-kex-wg0#159 ->(wq_completion)wg-kex-wg0#160 ->(wq_completion)wg-crypt-wg2#78 ->(wq_completion)wg-kex-wg2#154 ->(wq_completion)wg-kex-wg2#155 ->(wq_completion)wg-crypt-wg1#78 ->(wq_completion)wg-kex-wg1#155 ->(wq_completion)wg-kex-wg1#156 ->(wq_completion)wg-crypt-wg0#79 ->(wq_completion)wg-kex-wg0#157 ->(wq_completion)wg-kex-wg0#158 FD: 50 BD: 122 ++++: &wg->static_identity.lock ->&handshake->lock ->&rq->__lock ->&sem->wait_lock ->&p->pi_lock FD: 49 BD: 123 ++++: &handshake->lock ->crngs.lock ->tk_core.seq.seqcount ->&table->lock#2 ->fs_reclaim ->pool_lock#2 ->&rq->__lock ->&c->lock ->&____s->seqcount ->&____s->seqcount#2 ->batched_entropy_u8.lock ->kfence_freelist_lock ->&n->list_lock ->&cfs_rq->removed.lock ->remove_cache_srcu ->&sem->wait_lock ->&obj_hash[i].lock FD: 1 BD: 48 +.+.: &table->lock FD: 49 BD: 124 ++-.: &peer->endpoint_lock ->pool_lock#2 ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#6 FD: 25 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#13 ->&rq->__lock FD: 30 BD: 1 ..-.: &(&hdev->cmd_timer)->timer FD: 41 BD: 20 +.+.: (work_completion)(&(&hdev->cmd_timer)->work) ->(console_sem).lock ->console_owner_lock ->console_owner ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock 
->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 47 +...: &pmc->lock FD: 1 BD: 47 +...: _xmit_SIT FD: 17 BD: 55 +...: &bridge_netdev_addr_lock_key/1 ->pool_lock#2 ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 39 BD: 47 +.-.: (&brmctx->ip6_own_query.timer) ->&br->multicast_lock FD: 38 BD: 66 +.-.: &br->multicast_lock ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->&dir->lock#2 ->deferred_lock ->nl_table_lock ->nl_table_wait.lock ->&c->lock ->&____s->seqcount ->&n->list_lock ->&____s->seqcount#2 ->quarantine_lock FD: 39 BD: 47 +.-.: (&brmctx->ip4_own_query.timer) ->&br->multicast_lock FD: 53 BD: 1 +.-.: (&in_dev->mr_ifc_timer) ->&obj_hash[i].lock ->batched_entropy_u32.lock ->&base->lock FD: 1 BD: 4 +.+.: genl_mutex.wait_lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1024 ->&rq->__lock FD: 15 BD: 47 +.-.: (&app->periodic_timer) ->&app->lock FD: 30 BD: 1 ..-.: &(&br->gc_work)->timer FD: 13 BD: 48 +.+.: (work_completion)(&(&br->gc_work)->work) ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 47 +...: _xmit_TUNNEL FD: 17 BD: 47 +...: _xmit_IPGRE ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock ->&n->list_lock FD: 1 BD: 47 +...: _xmit_TUNNEL6 FD: 48 BD: 50 +.-.: _xmit_TUNNEL6#2 ->&obj_hash[i].lock ->pool_lock#2 FD: 43 BD: 55 +...: &dev_addr_list_lock_key/1 ->_xmit_ETHER ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&n->list_lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#678 ->&rq->__lock FD: 1 BD: 161 ..-.: &list->lock#12 FD: 43 BD: 55 +...: &dev_addr_list_lock_key#2/1 ->_xmit_ETHER ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&n->list_lock FD: 39 BD: 1 +.-.: (&pmctx->ip6_own_query.timer) ->&br->multicast_lock FD: 39 BD: 1 +.-.: (&pmctx->ip4_own_query.timer) ->&br->multicast_lock FD: 43 BD: 55 +...: _xmit_ETHER/1 ->_xmit_ETHER ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->krc.lock ->pool_lock#2 ->&n->list_lock FD: 21 BD: 57 +.-.: &hsr->seqnr_lock ->pool_lock#2 ->&obj_hash[i].lock ->quarantine_lock ->&meta->lock ->kfence_freelist_lock ->&base->lock FD: 1 BD: 58 +.-.: &new_node->seq_out_lock FD: 22 BD: 47 +.-.: (&hsr->announce_timer) FD: 1 BD: 47 +.+.: &nn->netlink_tap_lock FD: 17 BD: 55 +...: &batadv_netdev_addr_lock_key/1 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->krc.lock ->&n->list_lock FD: 43 BD: 55 +...: &vlan_netdev_addr_lock_key/1 ->_xmit_ETHER ->pool_lock#2 ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->krc.lock ->&n->list_lock ->&____s->seqcount#2 FD: 43 BD: 55 +...: &macvlan_netdev_addr_lock_key/1 ->_xmit_ETHER ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->krc.lock ->&____s->seqcount#2 ->&n->list_lock FD: 1 BD: 49 +.-.: &list->lock#13 FD: 30 BD: 48 +.+.: (work_completion)(&port->bc_work) ->&list->lock#13 ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock ->&meta->lock ->kfence_freelist_lock ->rcu_node_0 ->quarantine_lock ->&base->lock FD: 17 BD: 48 +...: &ipvlan->addrs_lock ->pool_lock#2 ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&n->list_lock FD: 43 BD: 55 +...: &macsec_netdev_addr_lock_key/1 ->_xmit_ETHER ->&c->lock ->&____s->seqcount ->&obj_hash[i].lock ->krc.lock ->&n->list_lock FD: 17 BD: 57 +...: key#20 ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock FD: 24 BD: 51 +...: &bat_priv->tt.commit_lock ->key#16 ->&bat_priv->softif_vlan_list_lock ->&bat_priv->tt.changes_list_lock ->&bat_priv->tt.last_changeset_lock ->pool_lock#2 ->&bat_priv->tvlv.container_list_lock ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->&meta->lock ->kfence_freelist_lock FD: 15 BD: 47 +...: 
dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 ->&sch->q.lock FD: 25 BD: 48 +.+.: &wg->socket_update_lock ->&rq->__lock FD: 1 BD: 100 +.-.: &list->lock#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#4 FD: 51 BD: 72 +.+.: (work_completion)(&peer->transmit_handshake_work) ->tk_core.seq.seqcount ->&wg->static_identity.lock ->&cookie->lock ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->&peer->endpoint_lock ->batched_entropy_u8.lock ->&c->lock ->&____s->seqcount ->&rq->__lock ->&____s->seqcount#2 ->&cfs_rq->removed.lock ->&n->list_lock ->rcu_node_0 ->&rcu_state.expedited_wq ->kfence_freelist_lock FD: 1 BD: 125 +...: &table->lock#2 FD: 25 BD: 122 ++++: &cookie->lock ->&rq->__lock FD: 1 BD: 57 +...: &entry->crc_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#285 FD: 1 BD: 100 +.-.: &r->producer_lock#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#4 FD: 83 BD: 96 +.+.: (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->&r->consumer_lock#2 ->&wg->static_identity.lock ->&peer->endpoint_lock ->tk_core.seq.seqcount ->&cookie->lock ->&handshake->lock ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->&rq->__lock ->&list->lock#14 ->&____s->seqcount ->&c->lock ->&____s->seqcount#2 ->rcu_node_0 ->&cfs_rq->removed.lock ->&meta->lock ->kfence_freelist_lock ->&n->list_lock ->&rcu_state.expedited_wq FD: 1 BD: 97 +.+.: &r->consumer_lock#2 FD: 5 BD: 124 +.-.: &peer->keypairs.keypair_update_lock ->&table->lock#2 ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#5 FD: 27 BD: 72 +.+.: (work_completion)(&peer->transmit_packet_work) ->&obj_hash[i].lock ->&peer->endpoint_lock ->&base->lock ->&rq->__lock ->batched_entropy_u8.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock ->pool_lock#2 FD: 25 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#4 ->&rq->__lock FD: 1 BD: 1 +.-.: &keypair->receiving_counter.lock FD: 1 BD: 2 +.+.: (wq_completion)nfc40_nci_cmd_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#440 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#658 ->&rq->__lock FD: 25 BD: 14 +.+.: &data->mtx ->&rq->__lock FD: 1 BD: 51 ....: &wdev->event_lock FD: 1 BD: 51 ....: (&dwork->timer) FD: 1 BD: 51 ....: (&dwork->timer)#2 FD: 1 BD: 51 +.+.: (work_completion)(&(&link->color_collision_detect_work)->work) FD: 9 BD: 52 +...: &bat_priv->tt.last_changeset_lock ->pool_lock#2 ->&obj_hash[i].lock ->&c->lock FD: 1 BD: 52 ..-.: &list->lock#15 FD: 1 BD: 51 +.-.: &ifibss->incomplete_lock FD: 704 BD: 1 +.+.: (wq_completion)cfg80211 ->(work_completion)(&rdev->event_work) ->(work_completion)(&(&rdev->dfs_update_channels_wk)->work) ->(work_completion)(&barr->work) FD: 369 BD: 6 +.+.: (work_completion)(&rdev->event_work) ->&rdev->wiphy.mtx ->&lock->wait_lock ->&p->pi_lock ->&rq->__lock FD: 32 BD: 2 +.+.: wireless_nlevent_work ->net_rwsem FD: 2 BD: 1 +.-.: (&tun->flow_gc_timer) ->&tun->lock FD: 1 BD: 48 +.-.: &tun->lock FD: 1 BD: 75 +.-.: &local->active_txq_lock[i] FD: 38 BD: 74 +.-.: &local->handle_wake_tx_queue_lock ->&local->active_txq_lock[i] ->&local->queue_stop_reason_lock ->&fq->lock ->tk_core.seq.seqcount ->hwsim_radio_lock ->&list->lock#16 FD: 1 BD: 75 ..-.: &local->queue_stop_reason_lock FD: 1 BD: 77 ..-.: &list->lock#16 FD: 32 BD: 1 +.-.: 
&local->rx_path_lock ->&obj_hash[i].lock ->pool_lock#2 ->&list->lock#15 ->&rdev->wiphy_work_lock FD: 17 BD: 51 +...: &sta->lock ->pool_lock#2 ->&obj_hash[i].lock ->krc.lock ->&c->lock ->&n->list_lock FD: 17 BD: 51 +...: &sta->rate_ctrl_lock ->pool_lock#2 ->&c->lock ->&obj_hash[i].lock ->krc.lock ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#564 ->&rq->__lock FD: 80 BD: 1 +.+.: &type->s_umount_key#46/1 ->fs_reclaim ->&c->lock ->pool_lock#2 ->pcpu_alloc_mutex ->shrinker_mutex ->list_lrus_mutex ->sb_lock ->mmu_notifier_invalidate_range_start ->&sb->s_type->i_lock_key#32 ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&____s->seqcount ->binderfs_minors_mutex ->&dentry->d_lock ->&sb->s_type->i_mutex_key#18 ->&____s->seqcount#2 ->&n->list_lock ->&rq->__lock FD: 37 BD: 4 +.+.: &sb->s_type->i_lock_key#32 ->&dentry->d_lock FD: 2 BD: 3 +.+.: binderfs_minors_mutex ->binderfs_minors.xa_lock FD: 1 BD: 4 ....: binderfs_minors.xa_lock FD: 64 BD: 2 +.+.: &sb->s_type->i_mutex_key#18 ->&sb->s_type->i_lock_key#32 ->rename_lock.seqcount ->fs_reclaim ->pool_lock#2 ->&dentry->d_lock ->mmu_notifier_invalidate_range_start ->&s->s_inode_list_lock ->tk_core.seq.seqcount ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->&rq->__lock ->&n->list_lock FD: 1 BD: 3 +.+.: iunique_lock FD: 642 BD: 2 +.+.: &type->i_mutex_dir_key#6/1 ->rename_lock.seqcount ->fs_reclaim ->&dentry->d_lock ->&root->kernfs_rwsem ->tomoyo_ss ->&root->kernfs_iattr_rwsem ->cgroup_mutex ->&c->lock FD: 52 BD: 1 .+.+: kn->active#48 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 52 BD: 1 ++++: kn->active#49 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 3 BD: 91 ..-.: cgroup_threadgroup_rwsem.rss.gp_wait.lock ->&obj_hash[i].lock FD: 14 BD: 47 +.-.: (&hsr->prune_timer) ->&hsr->list_lock ->&obj_hash[i].lock ->&base->lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1251 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#189 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1035 FD: 26 BD: 91 ....: cgroup_threadgroup_rwsem.waiters.lock ->&p->pi_lock FD: 1 BD: 16 +.+.: (wq_completion)cpuset_migrate_mm FD: 642 BD: 2 +.+.: &type->i_mutex_dir_key#7/1 ->rename_lock.seqcount ->fs_reclaim ->&dentry->d_lock ->&root->kernfs_rwsem ->tomoyo_ss ->&root->kernfs_iattr_rwsem ->cgroup_mutex ->&c->lock ->pool_lock#2 ->&xa->xa_lock#5 ->&obj_hash[i].lock ->stock_lock ->cgroup_mutex.wait_lock ->&p->pi_lock FD: 52 BD: 1 ++++: kn->active#50 ->fs_reclaim ->&kernfs_locks->open_file_mutex[count] ->stock_lock ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount FD: 1 BD: 92 ....: cpuset_attach_wq.lock FD: 2 BD: 519 ..-.: stock_lock ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) FD: 49 BD: 1 .+.+: kn->active#51 ->fs_reclaim ->stock_lock ->pool_lock#2 ->&kernfs_locks->open_file_mutex[count] ->&c->lock ->&n->list_lock ->remove_cache_srcu ->&rq->__lock ->&____s->seqcount#2 ->&____s->seqcount FD: 50 BD: 1 .+.+: kn->active#52 ->fs_reclaim ->stock_lock ->&kernfs_locks->open_file_mutex[count] ->memcg_max_mutex ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount ->remove_cache_srcu FD: 1 BD: 8 +.+.: memcg_max_mutex FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#412 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1036 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#404 FD: 25 BD: 
2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1267 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1270 FD: 1 BD: 6 ....: &per_cpu(xt_recseq, i) FD: 227 BD: 5 +.+.: nf_nat_proto_mutex ->fs_reclaim ->pool_lock#2 ->nf_hook_mutex ->cpu_hotplug_lock ->&obj_hash[i].lock ->stock_lock ->&c->lock ->&____s->seqcount#2 ->&____s->seqcount ->nf_hook_mutex.wait_lock ->&p->pi_lock ->krc.lock ->&n->list_lock ->&rq->__lock FD: 29 BD: 1 +.+.: loop_validate_mutex ->&lo->lo_mutex ->&rq->__lock ->loop_validate_mutex.wait_lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#283 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#707 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1501 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1439 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#11 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1407 ->&rq->__lock FD: 1 BD: 1 ....: _rs.lock FD: 30 BD: 1 ..-.: &(&conn->info_timer)->timer FD: 2 BD: 24 +.+.: (work_completion)(&(&conn->info_timer)->work) ->&conn->chan_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1523 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#516 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#689 ->&rq->__lock FD: 1 BD: 2 +.+.: misc_mtx.wait_lock FD: 1 BD: 1 +.+.: &lslist_lock FD: 25 BD: 2 +.+.: &net->ipv4.ra_mutex ->&rq->__lock FD: 1981 BD: 1 +.+.: &ndev->req_lock ->&wq->mutex ->(&ndev->cmd_timer) ->&obj_hash[i].lock ->&base->lock ->(&ndev->data_timer) ->&rq->__lock ->pool_lock#2 ->&cfs_rq->removed.lock ->rcu_node_0 ->&rcu_state.expedited_wq ->(wq_completion)nfc24_nci_cmd_wq#3 ->(wq_completion)nfc2_nci_cmd_wq#878 ->(wq_completion)nfc16_nci_cmd_wq#4 ->(wq_completion)nfc3_nci_cmd_wq#328 ->(wq_completion)nfc25_nci_cmd_wq#3 ->(wq_completion)nfc22_nci_cmd_wq#4 ->(wq_completion)nfc26_nci_cmd_wq#4 ->(wq_completion)nfc27_nci_cmd_wq#3 ->(wq_completion)nfc28_nci_cmd_wq#4 ->(wq_completion)nfc29_nci_cmd_wq#3 ->(wq_completion)nfc35_nci_cmd_wq#2 ->(wq_completion)nfc32_nci_cmd_wq#5 ->(wq_completion)nfc34_nci_cmd_wq#3 ->(wq_completion)nfc33_nci_cmd_wq#3 ->(wq_completion)nfc31_nci_cmd_wq#3 ->(wq_completion)nfc30_nci_cmd_wq#3 ->(wq_completion)nfc2_nci_cmd_wq#879 ->(wq_completion)nfc3_nci_cmd_wq#329 ->(wq_completion)nfc2_nci_cmd_wq#880 ->(wq_completion)nfc3_nci_cmd_wq#330 ->(wq_completion)nfc3_nci_cmd_wq#331 ->(wq_completion)nfc3_nci_cmd_wq#332 ->(wq_completion)nfc3_nci_cmd_wq#333 ->(wq_completion)nfc4_nci_cmd_wq#93 ->(wq_completion)nfc3_nci_cmd_wq#334 ->(wq_completion)nfc4_nci_cmd_wq#94 ->(wq_completion)nfc2_nci_cmd_wq#881 ->(wq_completion)nfc3_nci_cmd_wq#335 ->(wq_completion)nfc2_nci_cmd_wq#882 ->(wq_completion)nfc2_nci_cmd_wq#883 ->(wq_completion)nfc3_nci_cmd_wq#336 ->(wq_completion)nfc2_nci_cmd_wq#884 ->(wq_completion)nfc3_nci_cmd_wq#337 ->(wq_completion)nfc2_nci_cmd_wq#885 ->(wq_completion)nfc2_nci_cmd_wq#886 ->(wq_completion)nfc2_nci_cmd_wq#887 ->(wq_completion)nfc2_nci_cmd_wq#888 ->(wq_completion)nfc3_nci_cmd_wq#338 ->(wq_completion)nfc4_nci_cmd_wq#95 ->(wq_completion)nfc2_nci_cmd_wq#889 ->(wq_completion)nfc2_nci_cmd_wq#890 ->(wq_completion)nfc3_nci_cmd_wq#339 ->(wq_completion)nfc2_nci_cmd_wq#891 ->(wq_completion)nfc2_nci_cmd_wq#892 ->(wq_completion)nfc3_nci_cmd_wq#340 ->(wq_completion)nfc2_nci_cmd_wq#893 ->(wq_completion)nfc2_nci_cmd_wq#894 ->(wq_completion)nfc2_nci_cmd_wq#895 ->(wq_completion)nfc3_nci_cmd_wq#341 ->(wq_completion)nfc2_nci_cmd_wq#896 ->(wq_completion)nfc3_nci_cmd_wq#342 ->(wq_completion)nfc2_nci_cmd_wq#897 ->(wq_completion)nfc3_nci_cmd_wq#343 
->(wq_completion)nfc2_nci_cmd_wq#898 ->(wq_completion)nfc2_nci_cmd_wq#899 ->(wq_completion)nfc2_nci_cmd_wq#900 ->(wq_completion)nfc3_nci_cmd_wq#344 ->(wq_completion)nfc2_nci_cmd_wq#901 ->(wq_completion)nfc2_nci_cmd_wq#902 ->(wq_completion)nfc3_nci_cmd_wq#345 ->(wq_completion)nfc2_nci_cmd_wq#903 ->(wq_completion)nfc3_nci_cmd_wq#347 ->(wq_completion)nfc2_nci_cmd_wq#904 ->(wq_completion)nfc2_nci_cmd_wq#905 ->(wq_completion)nfc2_nci_cmd_wq#906 ->(wq_completion)nfc2_nci_cmd_wq#907 ->(wq_completion)nfc2_nci_cmd_wq#908 ->(wq_completion)nfc2_nci_cmd_wq#909 ->(wq_completion)nfc3_nci_cmd_wq#348 ->(wq_completion)nfc2_nci_cmd_wq#910 ->(wq_completion)nfc2_nci_cmd_wq#911 ->(wq_completion)nfc3_nci_cmd_wq#349 ->(wq_completion)nfc4_nci_cmd_wq#97 ->(wq_completion)nfc2_nci_cmd_wq#912 ->(wq_completion)nfc2_nci_cmd_wq#913 ->(wq_completion)nfc2_nci_cmd_wq#914 ->(wq_completion)nfc3_nci_cmd_wq#350 ->(wq_completion)nfc2_nci_cmd_wq#915 ->(wq_completion)nfc2_nci_cmd_wq#916 ->(wq_completion)nfc2_nci_cmd_wq#917 ->(wq_completion)nfc3_nci_cmd_wq#351 ->(wq_completion)nfc2_nci_cmd_wq#918 ->(wq_completion)nfc2_nci_cmd_wq#919 ->(wq_completion)nfc3_nci_cmd_wq#352 ->(wq_completion)nfc2_nci_cmd_wq#920 ->(wq_completion)nfc3_nci_cmd_wq#353 ->(wq_completion)nfc4_nci_cmd_wq#98 ->(wq_completion)nfc2_nci_cmd_wq#921 ->(wq_completion)nfc2_nci_cmd_wq#922 ->(wq_completion)nfc2_nci_cmd_wq#923 ->(wq_completion)nfc2_nci_cmd_wq#924 ->(wq_completion)nfc3_nci_cmd_wq#354 ->(wq_completion)nfc2_nci_cmd_wq#925 ->(wq_completion)nfc2_nci_cmd_wq#926 ->(wq_completion)nfc2_nci_cmd_wq#927 ->(wq_completion)nfc3_nci_cmd_wq#355 ->(wq_completion)nfc2_nci_cmd_wq#928 ->(wq_completion)nfc2_nci_cmd_wq#929 ->(wq_completion)nfc2_nci_cmd_wq#930 ->(wq_completion)nfc5_nci_cmd_wq#9 ->(wq_completion)nfc3_nci_cmd_wq#356 ->(wq_completion)nfc4_nci_cmd_wq#99 ->(wq_completion)nfc6_nci_cmd_wq#7 ->(wq_completion)nfc2_nci_cmd_wq#931 ->(wq_completion)nfc5_nci_cmd_wq#10 ->(wq_completion)nfc2_nci_cmd_wq#932 ->(wq_completion)nfc6_nci_cmd_wq#8 ->(wq_completion)nfc3_nci_cmd_wq#357 ->(wq_completion)nfc7_nci_cmd_wq#8 ->(wq_completion)nfc4_nci_cmd_wq#100 ->(wq_completion)nfc2_nci_cmd_wq#933 ->(wq_completion)nfc3_nci_cmd_wq#358 ->(wq_completion)nfc4_nci_cmd_wq#101 ->(wq_completion)nfc2_nci_cmd_wq#934 ->(wq_completion)nfc2_nci_cmd_wq#935 ->(wq_completion)nfc2_nci_cmd_wq#936 ->(wq_completion)nfc3_nci_cmd_wq#359 ->(wq_completion)nfc2_nci_cmd_wq#937 ->(wq_completion)nfc3_nci_cmd_wq#360 ->(wq_completion)nfc4_nci_cmd_wq#102 ->(wq_completion)nfc2_nci_cmd_wq#938 ->(wq_completion)nfc3_nci_cmd_wq#361 ->(wq_completion)nfc2_nci_cmd_wq#939 ->(wq_completion)nfc5_nci_cmd_wq#11 ->(wq_completion)nfc4_nci_cmd_wq#103 ->(wq_completion)nfc3_nci_cmd_wq#362 ->(wq_completion)nfc2_nci_cmd_wq#940 ->(wq_completion)nfc3_nci_cmd_wq#363 ->(wq_completion)nfc4_nci_cmd_wq#104 ->(wq_completion)nfc2_nci_cmd_wq#941 ->(wq_completion)nfc3_nci_cmd_wq#364 ->(wq_completion)nfc3_nci_cmd_wq#365 ->(wq_completion)nfc2_nci_cmd_wq#942 ->(wq_completion)nfc2_nci_cmd_wq#943 ->(wq_completion)nfc3_nci_cmd_wq#366 ->(wq_completion)nfc2_nci_cmd_wq#944 ->(wq_completion)nfc2_nci_cmd_wq#945 ->(wq_completion)nfc3_nci_cmd_wq#367 ->(wq_completion)nfc2_nci_cmd_wq#946 ->(wq_completion)nfc3_nci_cmd_wq#368 ->(wq_completion)nfc2_nci_cmd_wq#947 ->(wq_completion)nfc3_nci_cmd_wq#369 ->(wq_completion)nfc2_nci_cmd_wq#948 ->(wq_completion)nfc4_nci_cmd_wq#105 ->(wq_completion)nfc2_nci_cmd_wq#949 ->(wq_completion)nfc3_nci_cmd_wq#370 ->(wq_completion)nfc2_nci_cmd_wq#950 ->(wq_completion)nfc3_nci_cmd_wq#371 ->(wq_completion)nfc2_nci_cmd_wq#951 
->(wq_completion)nfc3_nci_cmd_wq#372 ->(wq_completion)nfc2_nci_cmd_wq#952 ->(wq_completion)nfc3_nci_cmd_wq#373 ->(wq_completion)nfc4_nci_cmd_wq#106 ->(wq_completion)nfc6_nci_cmd_wq#9 ->(wq_completion)nfc7_nci_cmd_wq#9 ->(wq_completion)nfc11_nci_cmd_wq#5 ->(wq_completion)nfc12_nci_cmd_wq#6 ->(wq_completion)nfc14_nci_cmd_wq#6 ->(wq_completion)nfc12_nci_cmd_wq#7 ->(wq_completion)nfc21_nci_cmd_wq#4 ->(wq_completion)nfc22_nci_cmd_wq#5 ->(wq_completion)nfc25_nci_cmd_wq#4 ->(wq_completion)nfc26_nci_cmd_wq#5 ->(wq_completion)nfc28_nci_cmd_wq#5 ->(wq_completion)nfc28_nci_cmd_wq#6 ->(wq_completion)nfc28_nci_cmd_wq#7 ->(wq_completion)nfc36_nci_cmd_wq#4 ->(wq_completion)nfc37_nci_cmd_wq#3 ->(wq_completion)nfc2_nci_cmd_wq#953 ->(wq_completion)nfc3_nci_cmd_wq#374 ->(wq_completion)nfc4_nci_cmd_wq#107 ->(wq_completion)nfc5_nci_cmd_wq#12 ->(wq_completion)nfc8_nci_cmd_wq#5 ->(wq_completion)nfc3_nci_cmd_wq#375 ->(wq_completion)nfc6_nci_cmd_wq#10 ->(wq_completion)nfc7_nci_cmd_wq#10 ->(wq_completion)nfc9_nci_cmd_wq#5 ->(wq_completion)nfc10_nci_cmd_wq#5 ->(wq_completion)nfc11_nci_cmd_wq#6 ->(wq_completion)nfc13_nci_cmd_wq#7 ->(wq_completion)nfc15_nci_cmd_wq#5 ->(wq_completion)nfc16_nci_cmd_wq#5 ->(wq_completion)nfc17_nci_cmd_wq#7 ->(wq_completion)nfc18_nci_cmd_wq#4 ->(wq_completion)nfc19_nci_cmd_wq#5 ->(wq_completion)nfc20_nci_cmd_wq#4 ->(wq_completion)nfc12_nci_cmd_wq#8 ->(wq_completion)nfc14_nci_cmd_wq#7 ->(wq_completion)nfc21_nci_cmd_wq#5 ->(wq_completion)nfc22_nci_cmd_wq#6 ->(wq_completion)nfc23_nci_cmd_wq#4 ->(wq_completion)nfc24_nci_cmd_wq#4 ->(wq_completion)nfc25_nci_cmd_wq#5 ->(wq_completion)nfc27_nci_cmd_wq#4 ->(wq_completion)nfc26_nci_cmd_wq#6 ->(wq_completion)nfc29_nci_cmd_wq#4 ->(wq_completion)nfc40_nci_cmd_wq#2 ->(wq_completion)nfc2_nci_cmd_wq#954 ->(wq_completion)nfc4_nci_cmd_wq#108 ->(wq_completion)nfc3_nci_cmd_wq#376 ->(wq_completion)nfc5_nci_cmd_wq#13 ->(wq_completion)nfc6_nci_cmd_wq#11 ->(wq_completion)nfc7_nci_cmd_wq#11 ->(wq_completion)nfc8_nci_cmd_wq#6 ->(wq_completion)nfc9_nci_cmd_wq#6 ->(wq_completion)nfc10_nci_cmd_wq#6 ->(wq_completion)nfc11_nci_cmd_wq#7 ->(wq_completion)nfc12_nci_cmd_wq#9 ->(wq_completion)nfc39_nci_cmd_wq#2 ->(wq_completion)nfc38_nci_cmd_wq#2 ->(wq_completion)nfc37_nci_cmd_wq#4 ->(wq_completion)nfc36_nci_cmd_wq#5 ->(wq_completion)nfc28_nci_cmd_wq#8 ->(wq_completion)nfc35_nci_cmd_wq#3 ->(wq_completion)nfc34_nci_cmd_wq#4 ->(wq_completion)nfc33_nci_cmd_wq#4 ->(wq_completion)nfc32_nci_cmd_wq#6 ->(wq_completion)nfc31_nci_cmd_wq#4 ->(wq_completion)nfc30_nci_cmd_wq#4 ->(wq_completion)nfc9_nci_cmd_wq#7 ->(wq_completion)nfc8_nci_cmd_wq#7 ->(wq_completion)nfc7_nci_cmd_wq#12 ->(wq_completion)nfc6_nci_cmd_wq#12 ->(wq_completion)nfc5_nci_cmd_wq#14 ->(wq_completion)nfc3_nci_cmd_wq#377 ->(wq_completion)nfc4_nci_cmd_wq#109 ->(wq_completion)nfc2_nci_cmd_wq#955 ->(wq_completion)nfc13_nci_cmd_wq#8 ->(wq_completion)nfc2_nci_cmd_wq#956 ->(wq_completion)nfc3_nci_cmd_wq#378 ->(wq_completion)nfc4_nci_cmd_wq#110 ->(wq_completion)nfc2_nci_cmd_wq#957 ->(wq_completion)nfc2_nci_cmd_wq#958 ->(wq_completion)nfc2_nci_cmd_wq#959 ->(wq_completion)nfc4_nci_cmd_wq#111 ->(wq_completion)nfc3_nci_cmd_wq#379 ->(wq_completion)nfc2_nci_cmd_wq#960 ->(wq_completion)nfc2_nci_cmd_wq#961 ->(wq_completion)nfc2_nci_cmd_wq#962 ->(wq_completion)nfc2_nci_cmd_wq#963 ->(wq_completion)nfc2_nci_cmd_wq#964 ->(wq_completion)nfc3_nci_cmd_wq#380 ->(wq_completion)nfc2_nci_cmd_wq#965 ->(wq_completion)nfc2_nci_cmd_wq#966 ->(wq_completion)nfc3_nci_cmd_wq#381 ->(wq_completion)nfc3_nci_cmd_wq#383 ->(wq_completion)nfc2_nci_cmd_wq#967 
->(wq_completion)nfc4_nci_cmd_wq#112 ->(wq_completion)nfc2_nci_cmd_wq#968 ->(wq_completion)nfc4_nci_cmd_wq#113 ->(wq_completion)nfc5_nci_cmd_wq#15 ->(wq_completion)nfc3_nci_cmd_wq#384 ->(wq_completion)nfc2_nci_cmd_wq#969 ->(wq_completion)nfc2_nci_cmd_wq#970 ->(wq_completion)nfc4_nci_cmd_wq#114 ->(wq_completion)nfc5_nci_cmd_wq#16 ->(wq_completion)nfc3_nci_cmd_wq#385 ->(wq_completion)nfc2_nci_cmd_wq#971 ->(wq_completion)nfc3_nci_cmd_wq#386 ->(wq_completion)nfc2_nci_cmd_wq#972 ->(wq_completion)nfc4_nci_cmd_wq#115 ->(wq_completion)nfc5_nci_cmd_wq#17 ->(wq_completion)nfc6_nci_cmd_wq#13 ->(wq_completion)nfc3_nci_cmd_wq#387 ->(wq_completion)nfc2_nci_cmd_wq#974 ->(wq_completion)nfc3_nci_cmd_wq#388 ->(wq_completion)nfc4_nci_cmd_wq#116 ->(wq_completion)nfc2_nci_cmd_wq#975 ->(wq_completion)nfc3_nci_cmd_wq#389 ->(wq_completion)nfc2_nci_cmd_wq#976 ->(wq_completion)nfc3_nci_cmd_wq#390 ->(wq_completion)nfc2_nci_cmd_wq#977 ->(wq_completion)nfc4_nci_cmd_wq#117 ->(wq_completion)nfc2_nci_cmd_wq#978 ->(wq_completion)nfc2_nci_cmd_wq#979 ->(wq_completion)nfc3_nci_cmd_wq#391 ->(wq_completion)nfc4_nci_cmd_wq#118 ->(wq_completion)nfc2_nci_cmd_wq#980 ->(wq_completion)nfc2_nci_cmd_wq#981 ->(wq_completion)nfc3_nci_cmd_wq#392 ->(wq_completion)nfc4_nci_cmd_wq#119 ->(wq_completion)nfc2_nci_cmd_wq#982 ->(wq_completion)nfc3_nci_cmd_wq#393 ->(wq_completion)nfc4_nci_cmd_wq#120 ->(wq_completion)nfc2_nci_cmd_wq#983 ->(wq_completion)nfc3_nci_cmd_wq#394 ->(wq_completion)nfc2_nci_cmd_wq#984 ->(wq_completion)nfc2_nci_cmd_wq#985 ->(wq_completion)nfc3_nci_cmd_wq#395 ->(wq_completion)nfc2_nci_cmd_wq#986 ->(wq_completion)nfc2_nci_cmd_wq#987 ->(wq_completion)nfc3_nci_cmd_wq#396 ->(wq_completion)nfc4_nci_cmd_wq#121 ->(wq_completion)nfc2_nci_cmd_wq#988 ->(wq_completion)nfc3_nci_cmd_wq#397 ->(wq_completion)nfc4_nci_cmd_wq#122 ->(wq_completion)nfc2_nci_cmd_wq#989 ->(wq_completion)nfc3_nci_cmd_wq#398 ->(wq_completion)nfc2_nci_cmd_wq#990 ->(wq_completion)nfc3_nci_cmd_wq#399 ->(wq_completion)nfc2_nci_cmd_wq#991 ->(wq_completion)nfc3_nci_cmd_wq#400 ->(wq_completion)nfc2_nci_cmd_wq#992 ->(wq_completion)nfc3_nci_cmd_wq#401 ->(wq_completion)nfc3_nci_cmd_wq#402 ->(wq_completion)nfc2_nci_cmd_wq#993 ->(wq_completion)nfc4_nci_cmd_wq#123 ->(wq_completion)nfc5_nci_cmd_wq#18 ->(wq_completion)nfc2_nci_cmd_wq#994 ->(wq_completion)nfc4_nci_cmd_wq#124 ->(wq_completion)nfc2_nci_cmd_wq#995 ->(wq_completion)nfc3_nci_cmd_wq#403 ->(wq_completion)nfc2_nci_cmd_wq#996 ->(wq_completion)nfc4_nci_cmd_wq#125 ->(wq_completion)nfc3_nci_cmd_wq#404 ->(wq_completion)nfc5_nci_cmd_wq#19 ->(wq_completion)nfc2_nci_cmd_wq#997 ->(wq_completion)nfc2_nci_cmd_wq#998 ->(wq_completion)nfc2_nci_cmd_wq#999 ->(wq_completion)nfc3_nci_cmd_wq#405 ->(wq_completion)nfc2_nci_cmd_wq#1000 ->(wq_completion)nfc2_nci_cmd_wq#1001 ->(wq_completion)nfc3_nci_cmd_wq#406 ->(wq_completion)nfc2_nci_cmd_wq#1002 ->(wq_completion)nfc2_nci_cmd_wq#1003 ->(wq_completion)nfc3_nci_cmd_wq#407 ->(wq_completion)nfc4_nci_cmd_wq#126 ->(wq_completion)nfc2_nci_cmd_wq#1004 ->(wq_completion)nfc2_nci_cmd_wq#1005 ->(wq_completion)nfc2_nci_cmd_wq#1006 ->(wq_completion)nfc3_nci_cmd_wq#408 ->(wq_completion)nfc4_nci_cmd_wq#127 ->(wq_completion)nfc2_nci_cmd_wq#1007 ->(wq_completion)nfc2_nci_cmd_wq#1008 ->(wq_completion)nfc3_nci_cmd_wq#409 ->(wq_completion)nfc2_nci_cmd_wq#1009 ->(wq_completion)nfc2_nci_cmd_wq#1010 ->(wq_completion)nfc2_nci_cmd_wq#1011 ->(wq_completion)nfc2_nci_cmd_wq#1012 ->(wq_completion)nfc2_nci_cmd_wq#1013 ->(wq_completion)nfc2_nci_cmd_wq#1014 ->(wq_completion)nfc2_nci_cmd_wq#1015 
->(wq_completion)nfc3_nci_cmd_wq#410 ->(wq_completion)nfc4_nci_cmd_wq#128 ->(wq_completion)nfc2_nci_cmd_wq#1016 ->(wq_completion)nfc2_nci_cmd_wq#1017 ->(wq_completion)nfc3_nci_cmd_wq#411 ->(wq_completion)nfc2_nci_cmd_wq#1018 ->(wq_completion)nfc2_nci_cmd_wq#1019 ->(wq_completion)nfc2_nci_cmd_wq#1020 ->(wq_completion)nfc2_nci_cmd_wq#1021 ->(wq_completion)nfc2_nci_cmd_wq#1022 ->(wq_completion)nfc2_nci_cmd_wq#1023 ->(wq_completion)nfc2_nci_cmd_wq#1024 ->(wq_completion)nfc2_nci_cmd_wq#1025 ->(wq_completion)nfc2_nci_cmd_wq#1026 ->(wq_completion)nfc2_nci_cmd_wq#1027 ->(wq_completion)nfc2_nci_cmd_wq#1028 ->(wq_completion)nfc2_nci_cmd_wq#1029 ->(wq_completion)nfc3_nci_cmd_wq#412 ->(wq_completion)nfc2_nci_cmd_wq#1030 ->(wq_completion)nfc2_nci_cmd_wq#1031 ->(wq_completion)nfc2_nci_cmd_wq#1032 ->(wq_completion)nfc2_nci_cmd_wq#1033 ->(wq_completion)nfc3_nci_cmd_wq#413 ->(wq_completion)nfc2_nci_cmd_wq#1034 ->(wq_completion)nfc3_nci_cmd_wq#414 ->(wq_completion)nfc2_nci_cmd_wq#1035 ->(wq_completion)nfc2_nci_cmd_wq#1036 ->(wq_completion)nfc3_nci_cmd_wq#415 ->(wq_completion)nfc2_nci_cmd_wq#1037 ->(wq_completion)nfc3_nci_cmd_wq#416 ->(wq_completion)nfc2_nci_cmd_wq#1038 ->(wq_completion)nfc3_nci_cmd_wq#417 ->(wq_completion)nfc2_nci_cmd_wq#1039 ->(wq_completion)nfc2_nci_cmd_wq#1040 ->(wq_completion)nfc3_nci_cmd_wq#418 ->(wq_completion)nfc2_nci_cmd_wq#1041 ->(wq_completion)nfc3_nci_cmd_wq#419 ->(wq_completion)nfc2_nci_cmd_wq#1042 ->(wq_completion)nfc2_nci_cmd_wq#1043 ->(wq_completion)nfc3_nci_cmd_wq#421 ->(wq_completion)nfc2_nci_cmd_wq#1044 ->(wq_completion)nfc2_nci_cmd_wq#1045 ->(wq_completion)nfc3_nci_cmd_wq#422 ->(wq_completion)nfc2_nci_cmd_wq#1046 ->(wq_completion)nfc3_nci_cmd_wq#423 ->(wq_completion)nfc2_nci_cmd_wq#1047 ->(wq_completion)nfc3_nci_cmd_wq#424 ->(wq_completion)nfc2_nci_cmd_wq#1048 ->(wq_completion)nfc4_nci_cmd_wq#130 ->(wq_completion)nfc2_nci_cmd_wq#1049 ->(wq_completion)nfc2_nci_cmd_wq#1050 ->(wq_completion)nfc2_nci_cmd_wq#1051 ->(wq_completion)nfc2_nci_cmd_wq#1052 ->(wq_completion)nfc3_nci_cmd_wq#425 ->(wq_completion)nfc2_nci_cmd_wq#1053 ->(wq_completion)nfc2_nci_cmd_wq#1054 ->(wq_completion)nfc3_nci_cmd_wq#426 ->(wq_completion)nfc2_nci_cmd_wq#1055 ->(wq_completion)nfc2_nci_cmd_wq#1056 ->(wq_completion)nfc2_nci_cmd_wq#1057 ->(wq_completion)nfc2_nci_cmd_wq#1058 ->(wq_completion)nfc2_nci_cmd_wq#1059 ->(wq_completion)nfc2_nci_cmd_wq#1060 ->(wq_completion)nfc3_nci_cmd_wq#427 ->(wq_completion)nfc2_nci_cmd_wq#1061 ->(wq_completion)nfc3_nci_cmd_wq#428 ->(wq_completion)nfc2_nci_cmd_wq#1062 ->(wq_completion)nfc3_nci_cmd_wq#429 ->(wq_completion)nfc2_nci_cmd_wq#1063 ->(wq_completion)nfc4_nci_cmd_wq#131 ->(wq_completion)nfc3_nci_cmd_wq#430 ->(wq_completion)nfc2_nci_cmd_wq#1064 ->(wq_completion)nfc3_nci_cmd_wq#431 ->(wq_completion)nfc2_nci_cmd_wq#1065 ->(wq_completion)nfc3_nci_cmd_wq#432 ->(wq_completion)nfc16_nci_cmd_wq#6 ->(wq_completion)nfc17_nci_cmd_wq#8 ->(wq_completion)nfc31_nci_cmd_wq#5 ->(wq_completion)nfc34_nci_cmd_wq#5 ->(wq_completion)nfc2_nci_cmd_wq#1066 ->(wq_completion)nfc4_nci_cmd_wq#132 ->(wq_completion)nfc5_nci_cmd_wq#20 ->(wq_completion)nfc3_nci_cmd_wq#433 ->(wq_completion)nfc6_nci_cmd_wq#14 ->(wq_completion)nfc7_nci_cmd_wq#13 ->(wq_completion)nfc8_nci_cmd_wq#8 ->(wq_completion)nfc9_nci_cmd_wq#8 ->(wq_completion)nfc10_nci_cmd_wq#7 ->(wq_completion)nfc11_nci_cmd_wq#8 ->(wq_completion)nfc12_nci_cmd_wq#10 ->(wq_completion)nfc7_nci_cmd_wq#14 ->(wq_completion)nfc13_nci_cmd_wq#9 ->(wq_completion)nfc9_nci_cmd_wq#9 ->(wq_completion)nfc14_nci_cmd_wq#8 ->(wq_completion)nfc15_nci_cmd_wq#6 
->(wq_completion)nfc16_nci_cmd_wq#7 ->(wq_completion)nfc17_nci_cmd_wq#9 ->(wq_completion)nfc18_nci_cmd_wq#5 ->(wq_completion)nfc19_nci_cmd_wq#6 ->(wq_completion)nfc20_nci_cmd_wq#5 ->(wq_completion)nfc21_nci_cmd_wq#6 ->(wq_completion)nfc22_nci_cmd_wq#7 ->(wq_completion)nfc23_nci_cmd_wq#5 ->(wq_completion)nfc24_nci_cmd_wq#5 ->(wq_completion)nfc25_nci_cmd_wq#6 ->(wq_completion)nfc26_nci_cmd_wq#7 ->(wq_completion)nfc34_nci_cmd_wq#6 ->(wq_completion)nfc2_nci_cmd_wq#1067 ->(wq_completion)nfc4_nci_cmd_wq#133 ->(wq_completion)nfc5_nci_cmd_wq#21 ->(wq_completion)nfc3_nci_cmd_wq#434 ->(wq_completion)nfc6_nci_cmd_wq#15 ->(wq_completion)nfc8_nci_cmd_wq#9 ->(wq_completion)nfc10_nci_cmd_wq#8 ->(wq_completion)nfc7_nci_cmd_wq#15 ->(wq_completion)nfc9_nci_cmd_wq#10 ->(wq_completion)nfc11_nci_cmd_wq#9 ->(wq_completion)nfc12_nci_cmd_wq#11 ->(wq_completion)nfc13_nci_cmd_wq#10 ->(wq_completion)nfc14_nci_cmd_wq#9 ->(wq_completion)nfc15_nci_cmd_wq#7 ->(wq_completion)nfc16_nci_cmd_wq#8 ->(wq_completion)nfc17_nci_cmd_wq#10 ->(wq_completion)nfc18_nci_cmd_wq#6 ->(wq_completion)nfc39_nci_cmd_wq#3 ->(wq_completion)nfc38_nci_cmd_wq#3 ->(wq_completion)nfc37_nci_cmd_wq#5 ->(wq_completion)nfc36_nci_cmd_wq#6 ->(wq_completion)nfc35_nci_cmd_wq#4 ->(wq_completion)nfc33_nci_cmd_wq#5 ->(wq_completion)nfc31_nci_cmd_wq#6 ->(wq_completion)nfc32_nci_cmd_wq#7 ->(wq_completion)nfc30_nci_cmd_wq#5 ->(wq_completion)nfc29_nci_cmd_wq#5 ->(wq_completion)nfc28_nci_cmd_wq#9 ->(wq_completion)nfc27_nci_cmd_wq#5 ->(wq_completion)nfc10_nci_cmd_wq#9 ->(wq_completion)nfc9_nci_cmd_wq#11 ->(wq_completion)nfc7_nci_cmd_wq#16 ->(wq_completion)nfc8_nci_cmd_wq#10 ->(wq_completion)nfc6_nci_cmd_wq#16 ->(wq_completion)nfc5_nci_cmd_wq#22 ->(wq_completion)nfc3_nci_cmd_wq#435 ->(wq_completion)nfc4_nci_cmd_wq#134 ->(wq_completion)nfc2_nci_cmd_wq#1068 ->(wq_completion)nfc19_nci_cmd_wq#7 ->(wq_completion)nfc2_nci_cmd_wq#1069 ->(wq_completion)nfc3_nci_cmd_wq#436 ->(wq_completion)nfc2_nci_cmd_wq#1070 ->(wq_completion)nfc2_nci_cmd_wq#1071 ->(wq_completion)nfc3_nci_cmd_wq#437 ->(wq_completion)nfc2_nci_cmd_wq#1072 ->(wq_completion)nfc2_nci_cmd_wq#1073 ->(wq_completion)nfc2_nci_cmd_wq#1074 ->(wq_completion)nfc3_nci_cmd_wq#438 ->(wq_completion)nfc4_nci_cmd_wq#135 ->(wq_completion)nfc2_nci_cmd_wq#1075 ->(wq_completion)nfc2_nci_cmd_wq#1076 ->(wq_completion)nfc3_nci_cmd_wq#439 ->(wq_completion)nfc2_nci_cmd_wq#1077 ->(wq_completion)nfc2_nci_cmd_wq#1078 ->(wq_completion)nfc3_nci_cmd_wq#440 ->(wq_completion)nfc4_nci_cmd_wq#136 ->(wq_completion)nfc2_nci_cmd_wq#1079 ->(wq_completion)nfc3_nci_cmd_wq#441 ->(wq_completion)nfc2_nci_cmd_wq#1080 ->(wq_completion)nfc2_nci_cmd_wq#1081 ->(wq_completion)nfc3_nci_cmd_wq#442 ->(wq_completion)nfc4_nci_cmd_wq#137 ->(wq_completion)nfc2_nci_cmd_wq#1082 ->(wq_completion)nfc2_nci_cmd_wq#1083 ->(wq_completion)nfc2_nci_cmd_wq#1084 ->(wq_completion)nfc3_nci_cmd_wq#443 ->(wq_completion)nfc3_nci_cmd_wq#444 ->(wq_completion)nfc2_nci_cmd_wq#1085 ->(wq_completion)nfc4_nci_cmd_wq#138 ->(wq_completion)nfc2_nci_cmd_wq#1086 ->(wq_completion)nfc2_nci_cmd_wq#1087 ->(wq_completion)nfc3_nci_cmd_wq#445 ->(wq_completion)nfc2_nci_cmd_wq#1088 ->(wq_completion)nfc2_nci_cmd_wq#1089 ->(wq_completion)nfc4_nci_cmd_wq#139 ->(wq_completion)nfc2_nci_cmd_wq#1090 ->(wq_completion)nfc3_nci_cmd_wq#446 ->(wq_completion)nfc2_nci_cmd_wq#1091 ->(wq_completion)nfc3_nci_cmd_wq#447 ->(wq_completion)nfc2_nci_cmd_wq#1092 ->(wq_completion)nfc2_nci_cmd_wq#1093 ->(wq_completion)nfc3_nci_cmd_wq#448 ->(wq_completion)nfc2_nci_cmd_wq#1094 ->(wq_completion)nfc2_nci_cmd_wq#1095 
->(wq_completion)nfc3_nci_cmd_wq#449 ->(wq_completion)nfc4_nci_cmd_wq#140 ->(wq_completion)nfc2_nci_cmd_wq#1096 ->(wq_completion)nfc4_nci_cmd_wq#141 ->(wq_completion)nfc3_nci_cmd_wq#450 ->(wq_completion)nfc2_nci_cmd_wq#1097 ->(wq_completion)nfc4_nci_cmd_wq#142 ->(wq_completion)nfc2_nci_cmd_wq#1098 ->(wq_completion)nfc3_nci_cmd_wq#451 ->(wq_completion)nfc2_nci_cmd_wq#1099 ->(wq_completion)nfc3_nci_cmd_wq#452 ->(wq_completion)nfc2_nci_cmd_wq#1100 ->(wq_completion)nfc4_nci_cmd_wq#143 ->(wq_completion)nfc2_nci_cmd_wq#1101 ->(wq_completion)nfc4_nci_cmd_wq#144 ->(wq_completion)nfc3_nci_cmd_wq#453 ->(wq_completion)nfc5_nci_cmd_wq#23 ->(wq_completion)nfc2_nci_cmd_wq#1102 ->(wq_completion)nfc3_nci_cmd_wq#454 ->(wq_completion)nfc2_nci_cmd_wq#1103 ->(wq_completion)nfc2_nci_cmd_wq#1104 ->(wq_completion)nfc3_nci_cmd_wq#455 ->(wq_completion)nfc4_nci_cmd_wq#145 ->(wq_completion)nfc2_nci_cmd_wq#1105 ->(wq_completion)nfc2_nci_cmd_wq#1106 ->(wq_completion)nfc2_nci_cmd_wq#1107 ->(wq_completion)nfc3_nci_cmd_wq#456 ->(wq_completion)nfc2_nci_cmd_wq#1108 ->(wq_completion)nfc3_nci_cmd_wq#457 ->(wq_completion)nfc2_nci_cmd_wq#1109 ->(wq_completion)nfc3_nci_cmd_wq#458 ->(wq_completion)nfc2_nci_cmd_wq#1110 ->(wq_completion)nfc3_nci_cmd_wq#459 ->(wq_completion)nfc2_nci_cmd_wq#1111 ->(wq_completion)nfc2_nci_cmd_wq#1112 ->(wq_completion)nfc3_nci_cmd_wq#460 ->(wq_completion)nfc2_nci_cmd_wq#1113 ->(wq_completion)nfc2_nci_cmd_wq#1114 ->(wq_completion)nfc2_nci_cmd_wq#1115 ->(wq_completion)nfc3_nci_cmd_wq#461 ->(wq_completion)nfc2_nci_cmd_wq#1116 ->(wq_completion)nfc3_nci_cmd_wq#462 ->(wq_completion)nfc2_nci_cmd_wq#1117 ->(wq_completion)nfc2_nci_cmd_wq#1118 ->(wq_completion)nfc3_nci_cmd_wq#463 ->(wq_completion)nfc2_nci_cmd_wq#1119 ->(wq_completion)nfc2_nci_cmd_wq#1120 ->(wq_completion)nfc3_nci_cmd_wq#464 ->(wq_completion)nfc4_nci_cmd_wq#146 ->(wq_completion)nfc3_nci_cmd_wq#465 ->(wq_completion)nfc2_nci_cmd_wq#1121 ->(wq_completion)nfc2_nci_cmd_wq#1122 ->(wq_completion)nfc2_nci_cmd_wq#1123 ->(wq_completion)nfc3_nci_cmd_wq#466 ->(wq_completion)nfc2_nci_cmd_wq#1124 ->(wq_completion)nfc3_nci_cmd_wq#467 ->(wq_completion)nfc2_nci_cmd_wq#1125 ->(wq_completion)nfc2_nci_cmd_wq#1126 ->(wq_completion)nfc2_nci_cmd_wq#1127 ->(wq_completion)nfc3_nci_cmd_wq#468 ->(wq_completion)nfc2_nci_cmd_wq#1128 ->(wq_completion)nfc2_nci_cmd_wq#1129 ->(wq_completion)nfc3_nci_cmd_wq#469 ->(wq_completion)nfc4_nci_cmd_wq#147 ->(wq_completion)nfc2_nci_cmd_wq#1130 ->(wq_completion)nfc3_nci_cmd_wq#470 ->(wq_completion)nfc2_nci_cmd_wq#1131 ->(wq_completion)nfc3_nci_cmd_wq#471 ->(wq_completion)nfc4_nci_cmd_wq#148 ->(wq_completion)nfc2_nci_cmd_wq#1132 ->(wq_completion)nfc2_nci_cmd_wq#1133 ->(wq_completion)nfc3_nci_cmd_wq#472 ->(wq_completion)nfc2_nci_cmd_wq#1134 ->(wq_completion)nfc3_nci_cmd_wq#473 ->(wq_completion)nfc2_nci_cmd_wq#1135 ->(wq_completion)nfc2_nci_cmd_wq#1136 ->(wq_completion)nfc3_nci_cmd_wq#474 ->(wq_completion)nfc2_nci_cmd_wq#1137 ->(wq_completion)nfc2_nci_cmd_wq#1138 ->(wq_completion)nfc2_nci_cmd_wq#1139 ->(wq_completion)nfc3_nci_cmd_wq#475 ->(wq_completion)nfc4_nci_cmd_wq#149 ->(wq_completion)nfc2_nci_cmd_wq#1140 ->(wq_completion)nfc2_nci_cmd_wq#1141 ->(wq_completion)nfc2_nci_cmd_wq#1142 ->(wq_completion)nfc2_nci_cmd_wq#1143 ->(wq_completion)nfc2_nci_cmd_wq#1144 ->(wq_completion)nfc4_nci_cmd_wq#150 ->(wq_completion)nfc2_nci_cmd_wq#1145 ->(wq_completion)nfc3_nci_cmd_wq#476 ->(wq_completion)nfc2_nci_cmd_wq#1146 ->(wq_completion)nfc3_nci_cmd_wq#477 ->(wq_completion)nfc2_nci_cmd_wq#1147 ->(wq_completion)nfc2_nci_cmd_wq#1148 
->(wq_completion)nfc2_nci_cmd_wq#1149 ->(wq_completion)nfc2_nci_cmd_wq#1150 ->(wq_completion)nfc3_nci_cmd_wq#478 ->(wq_completion)nfc4_nci_cmd_wq#151 ->(wq_completion)nfc3_nci_cmd_wq#479 ->(wq_completion)nfc2_nci_cmd_wq#1151 ->(wq_completion)nfc3_nci_cmd_wq#480 ->(wq_completion)nfc2_nci_cmd_wq#1152 ->(wq_completion)nfc2_nci_cmd_wq#1153 ->(wq_completion)nfc3_nci_cmd_wq#481 ->(wq_completion)nfc2_nci_cmd_wq#1154 ->(wq_completion)nfc3_nci_cmd_wq#482 ->(wq_completion)nfc2_nci_cmd_wq#1155 ->(wq_completion)nfc3_nci_cmd_wq#483 ->(wq_completion)nfc4_nci_cmd_wq#152 ->(wq_completion)nfc2_nci_cmd_wq#1156 ->(wq_completion)nfc2_nci_cmd_wq#1157 ->(wq_completion)nfc3_nci_cmd_wq#484 ->(wq_completion)nfc2_nci_cmd_wq#1158 ->(wq_completion)nfc3_nci_cmd_wq#485 ->(wq_completion)nfc2_nci_cmd_wq#1159 ->(wq_completion)nfc2_nci_cmd_wq#1160 ->(wq_completion)nfc3_nci_cmd_wq#486 ->(wq_completion)nfc2_nci_cmd_wq#1161 ->(wq_completion)nfc3_nci_cmd_wq#487 ->(wq_completion)nfc2_nci_cmd_wq#1162 ->(wq_completion)nfc2_nci_cmd_wq#1163 ->(wq_completion)nfc3_nci_cmd_wq#488 ->(wq_completion)nfc2_nci_cmd_wq#1164 ->(wq_completion)nfc4_nci_cmd_wq#153 ->(wq_completion)nfc2_nci_cmd_wq#1165 ->(wq_completion)nfc2_nci_cmd_wq#1166 ->(wq_completion)nfc2_nci_cmd_wq#1167 ->(wq_completion)nfc2_nci_cmd_wq#1168 ->(wq_completion)nfc2_nci_cmd_wq#1169 ->(wq_completion)nfc3_nci_cmd_wq#489 ->(wq_completion)nfc4_nci_cmd_wq#154 ->(wq_completion)nfc2_nci_cmd_wq#1170 ->(wq_completion)nfc3_nci_cmd_wq#490 ->(wq_completion)nfc3_nci_cmd_wq#491 ->(wq_completion)nfc2_nci_cmd_wq#1171 ->(wq_completion)nfc3_nci_cmd_wq#492 ->(wq_completion)nfc2_nci_cmd_wq#1172 ->(wq_completion)nfc3_nci_cmd_wq#493 ->(wq_completion)nfc3_nci_cmd_wq#494 ->(wq_completion)nfc2_nci_cmd_wq#1173 ->(wq_completion)nfc4_nci_cmd_wq#155 ->(wq_completion)nfc3_nci_cmd_wq#495 ->(wq_completion)nfc2_nci_cmd_wq#1174 ->(wq_completion)nfc4_nci_cmd_wq#156 ->(wq_completion)nfc5_nci_cmd_wq#25 ->(wq_completion)nfc6_nci_cmd_wq#17 ->(wq_completion)nfc2_nci_cmd_wq#1175 ->(wq_completion)nfc2_nci_cmd_wq#1176 ->(wq_completion)nfc3_nci_cmd_wq#496 ->(wq_completion)nfc5_nci_cmd_wq#26 ->(wq_completion)nfc4_nci_cmd_wq#157 ->(wq_completion)nfc7_nci_cmd_wq#17 ->(wq_completion)nfc6_nci_cmd_wq#18 ->(wq_completion)nfc2_nci_cmd_wq#1177 ->(wq_completion)nfc3_nci_cmd_wq#497 ->(wq_completion)nfc2_nci_cmd_wq#1178 ->(wq_completion)nfc2_nci_cmd_wq#1179 ->(wq_completion)nfc2_nci_cmd_wq#1180 ->(wq_completion)nfc3_nci_cmd_wq#498 ->(wq_completion)nfc2_nci_cmd_wq#1181 ->(wq_completion)nfc3_nci_cmd_wq#499 ->(wq_completion)nfc2_nci_cmd_wq#1182 ->(wq_completion)nfc3_nci_cmd_wq#500 ->(wq_completion)nfc4_nci_cmd_wq#158 ->(wq_completion)nfc2_nci_cmd_wq#1183 ->(wq_completion)nfc3_nci_cmd_wq#501 ->(wq_completion)nfc2_nci_cmd_wq#1184 ->(wq_completion)nfc3_nci_cmd_wq#502 ->(wq_completion)nfc2_nci_cmd_wq#1185 ->(wq_completion)nfc2_nci_cmd_wq#1186 ->(wq_completion)nfc2_nci_cmd_wq#1187 ->(wq_completion)nfc2_nci_cmd_wq#1188 ->(wq_completion)nfc3_nci_cmd_wq#503 ->(wq_completion)nfc4_nci_cmd_wq#159 ->(wq_completion)nfc2_nci_cmd_wq#1189 ->(wq_completion)nfc2_nci_cmd_wq#1190 ->(wq_completion)nfc3_nci_cmd_wq#505 ->(wq_completion)nfc2_nci_cmd_wq#1191 ->(wq_completion)nfc2_nci_cmd_wq#1192 ->(wq_completion)nfc3_nci_cmd_wq#506 ->(wq_completion)nfc2_nci_cmd_wq#1193 ->(wq_completion)nfc3_nci_cmd_wq#507 ->(wq_completion)nfc2_nci_cmd_wq#1194 ->(wq_completion)nfc4_nci_cmd_wq#160 ->(wq_completion)nfc3_nci_cmd_wq#508 ->(wq_completion)nfc2_nci_cmd_wq#1195 ->(wq_completion)nfc3_nci_cmd_wq#509 ->(wq_completion)nfc2_nci_cmd_wq#1196 
->(wq_completion)nfc5_nci_cmd_wq#27 ->(wq_completion)nfc2_nci_cmd_wq#1197 ->(wq_completion)nfc4_nci_cmd_wq#161 ->(wq_completion)nfc3_nci_cmd_wq#510 ->(wq_completion)nfc4_nci_cmd_wq#162 ->(wq_completion)nfc3_nci_cmd_wq#511 ->(wq_completion)nfc6_nci_cmd_wq#19 ->(wq_completion)nfc4_nci_cmd_wq#163 ->(wq_completion)nfc5_nci_cmd_wq#29 ->(wq_completion)nfc3_nci_cmd_wq#512 ->(wq_completion)nfc3_nci_cmd_wq#513 ->(wq_completion)nfc4_nci_cmd_wq#164 ->(wq_completion)nfc3_nci_cmd_wq#514 ->(wq_completion)nfc4_nci_cmd_wq#165 ->(wq_completion)nfc3_nci_cmd_wq#515 ->(wq_completion)nfc4_nci_cmd_wq#166 ->(wq_completion)nfc5_nci_cmd_wq#30 ->(wq_completion)nfc3_nci_cmd_wq#516 ->(wq_completion)nfc4_nci_cmd_wq#167 ->(wq_completion)nfc3_nci_cmd_wq#517 ->(wq_completion)nfc3_nci_cmd_wq#518 ->(wq_completion)nfc2_nci_cmd_wq#1198 ->(wq_completion)nfc2_nci_cmd_wq#1199 ->(wq_completion)nfc3_nci_cmd_wq#519 ->(wq_completion)nfc2_nci_cmd_wq#1200 ->(wq_completion)nfc3_nci_cmd_wq#520 ->(wq_completion)nfc2_nci_cmd_wq#1201 ->(wq_completion)nfc3_nci_cmd_wq#521 ->(wq_completion)nfc4_nci_cmd_wq#168 ->(wq_completion)nfc5_nci_cmd_wq#31 ->(wq_completion)nfc2_nci_cmd_wq#1202 ->(wq_completion)nfc2_nci_cmd_wq#1203 ->(wq_completion)nfc3_nci_cmd_wq#522 ->(wq_completion)nfc2_nci_cmd_wq#1204 ->(wq_completion)nfc3_nci_cmd_wq#523 ->(wq_completion)nfc2_nci_cmd_wq#1205 ->(wq_completion)nfc3_nci_cmd_wq#524 ->(wq_completion)nfc2_nci_cmd_wq#1206 ->(wq_completion)nfc2_nci_cmd_wq#1207 ->(wq_completion)nfc3_nci_cmd_wq#525 ->(wq_completion)nfc2_nci_cmd_wq#1208 ->(wq_completion)nfc4_nci_cmd_wq#169 ->(wq_completion)nfc5_nci_cmd_wq#32 ->(wq_completion)nfc2_nci_cmd_wq#1209 ->(wq_completion)nfc3_nci_cmd_wq#526 ->(wq_completion)nfc2_nci_cmd_wq#1210 ->(wq_completion)nfc3_nci_cmd_wq#527 ->(wq_completion)nfc2_nci_cmd_wq#1211 ->(wq_completion)nfc2_nci_cmd_wq#1212 ->(wq_completion)nfc3_nci_cmd_wq#528 ->(wq_completion)nfc2_nci_cmd_wq#1213 ->(wq_completion)nfc4_nci_cmd_wq#170 ->(wq_completion)nfc2_nci_cmd_wq#1214 ->(wq_completion)nfc2_nci_cmd_wq#1215 ->(wq_completion)nfc3_nci_cmd_wq#529 ->(wq_completion)nfc2_nci_cmd_wq#1216 ->(wq_completion)nfc3_nci_cmd_wq#530 ->(wq_completion)nfc4_nci_cmd_wq#171 ->(wq_completion)nfc2_nci_cmd_wq#1217 ->(wq_completion)nfc3_nci_cmd_wq#531 ->(wq_completion)nfc4_nci_cmd_wq#172 ->(wq_completion)nfc3_nci_cmd_wq#532 ->(wq_completion)nfc2_nci_cmd_wq#1218 ->(wq_completion)nfc3_nci_cmd_wq#533 ->(wq_completion)nfc2_nci_cmd_wq#1219 ->(wq_completion)nfc3_nci_cmd_wq#534 ->(wq_completion)nfc4_nci_cmd_wq#173 ->(wq_completion)nfc3_nci_cmd_wq#535 ->(wq_completion)nfc2_nci_cmd_wq#1220 ->(wq_completion)nfc4_nci_cmd_wq#174 ->(wq_completion)nfc2_nci_cmd_wq#1221 ->(wq_completion)nfc4_nci_cmd_wq#175 ->(wq_completion)nfc3_nci_cmd_wq#536 ->(wq_completion)nfc2_nci_cmd_wq#1222 ->(wq_completion)nfc2_nci_cmd_wq#1223 ->(wq_completion)nfc5_nci_cmd_wq#34 ->(wq_completion)nfc3_nci_cmd_wq#537 ->(wq_completion)nfc2_nci_cmd_wq#1224 ->(wq_completion)nfc3_nci_cmd_wq#538 ->(wq_completion)nfc4_nci_cmd_wq#176 ->(wq_completion)nfc3_nci_cmd_wq#539 ->(wq_completion)nfc2_nci_cmd_wq#1225 ->(wq_completion)nfc2_nci_cmd_wq#1226 ->(wq_completion)nfc3_nci_cmd_wq#540 ->(wq_completion)nfc2_nci_cmd_wq#1227 ->(wq_completion)nfc2_nci_cmd_wq#1228 ->(wq_completion)nfc2_nci_cmd_wq#1229 ->(wq_completion)nfc4_nci_cmd_wq#177 ->(wq_completion)nfc3_nci_cmd_wq#541 ->(wq_completion)nfc5_nci_cmd_wq#35 ->(wq_completion)nfc2_nci_cmd_wq#1230 ->(wq_completion)nfc4_nci_cmd_wq#178 ->(wq_completion)nfc3_nci_cmd_wq#542 ->(wq_completion)nfc4_nci_cmd_wq#179 ->(wq_completion)nfc2_nci_cmd_wq#1231 
->(wq_completion)nfc5_nci_cmd_wq#36 ->(wq_completion)nfc3_nci_cmd_wq#543 ->(wq_completion)nfc2_nci_cmd_wq#1232 ->(wq_completion)nfc4_nci_cmd_wq#180 ->(wq_completion)nfc3_nci_cmd_wq#544 ->(wq_completion)nfc2_nci_cmd_wq#1233 ->(wq_completion)nfc2_nci_cmd_wq#1234 ->(wq_completion)nfc5_nci_cmd_wq#37 ->(wq_completion)nfc6_nci_cmd_wq#20 ->(wq_completion)nfc2_nci_cmd_wq#1235 ->(wq_completion)nfc4_nci_cmd_wq#181 ->(wq_completion)nfc5_nci_cmd_wq#40 ->(wq_completion)nfc2_nci_cmd_wq#1236 ->(wq_completion)nfc6_nci_cmd_wq#21 ->(wq_completion)nfc3_nci_cmd_wq#545 ->(wq_completion)nfc2_nci_cmd_wq#1237 ->(wq_completion)nfc2_nci_cmd_wq#1238 ->(wq_completion)nfc3_nci_cmd_wq#546 ->(wq_completion)nfc4_nci_cmd_wq#182 ->(wq_completion)nfc2_nci_cmd_wq#1239 ->(wq_completion)nfc2_nci_cmd_wq#1240 ->(wq_completion)nfc3_nci_cmd_wq#547 ->(wq_completion)nfc2_nci_cmd_wq#1241 ->(wq_completion)nfc4_nci_cmd_wq#183 ->(wq_completion)nfc2_nci_cmd_wq#1242 ->(wq_completion)nfc3_nci_cmd_wq#548 ->(wq_completion)nfc3_nci_cmd_wq#549 ->(wq_completion)nfc4_nci_cmd_wq#184 ->(wq_completion)nfc5_nci_cmd_wq#41 ->(wq_completion)nfc2_nci_cmd_wq#1243 ->(wq_completion)nfc3_nci_cmd_wq#550 ->(wq_completion)nfc4_nci_cmd_wq#185 ->(wq_completion)nfc2_nci_cmd_wq#1244 ->(wq_completion)nfc3_nci_cmd_wq#551 ->(wq_completion)nfc2_nci_cmd_wq#1245 ->(wq_completion)nfc4_nci_cmd_wq#186 ->(wq_completion)nfc2_nci_cmd_wq#1246 ->(wq_completion)nfc3_nci_cmd_wq#552 ->(wq_completion)nfc2_nci_cmd_wq#1247 ->(wq_completion)nfc3_nci_cmd_wq#553 ->(wq_completion)nfc4_nci_cmd_wq#187 ->(wq_completion)nfc2_nci_cmd_wq#1248 ->(wq_completion)nfc5_nci_cmd_wq#42 ->(wq_completion)nfc3_nci_cmd_wq#554 ->(wq_completion)nfc2_nci_cmd_wq#1249 ->(wq_completion)nfc4_nci_cmd_wq#188 ->(wq_completion)nfc2_nci_cmd_wq#1250 ->(wq_completion)nfc2_nci_cmd_wq#1251 ->(wq_completion)nfc3_nci_cmd_wq#555 ->(wq_completion)nfc2_nci_cmd_wq#1252 ->(wq_completion)nfc2_nci_cmd_wq#1253 ->(wq_completion)nfc3_nci_cmd_wq#556 ->(wq_completion)nfc2_nci_cmd_wq#1254 ->(wq_completion)nfc2_nci_cmd_wq#1255 ->(wq_completion)nfc2_nci_cmd_wq#1256 ->(wq_completion)nfc3_nci_cmd_wq#557 ->(wq_completion)nfc4_nci_cmd_wq#189 ->(wq_completion)nfc2_nci_cmd_wq#1257 ->(wq_completion)nfc2_nci_cmd_wq#1258 ->(wq_completion)nfc2_nci_cmd_wq#1259 ->(wq_completion)nfc3_nci_cmd_wq#559 ->(wq_completion)nfc2_nci_cmd_wq#1260 ->(wq_completion)nfc4_nci_cmd_wq#190 ->(wq_completion)nfc2_nci_cmd_wq#1261 ->(wq_completion)nfc4_nci_cmd_wq#191 ->(wq_completion)nfc3_nci_cmd_wq#560 ->(wq_completion)nfc2_nci_cmd_wq#1262 ->(wq_completion)nfc3_nci_cmd_wq#561 ->(wq_completion)nfc3_nci_cmd_wq#562 ->(wq_completion)nfc2_nci_cmd_wq#1263 ->(wq_completion)nfc3_nci_cmd_wq#563 ->(wq_completion)nfc2_nci_cmd_wq#1264 ->(wq_completion)nfc3_nci_cmd_wq#564 ->(wq_completion)nfc2_nci_cmd_wq#1265 ->(wq_completion)nfc4_nci_cmd_wq#192 ->(wq_completion)nfc5_nci_cmd_wq#43 ->(wq_completion)nfc6_nci_cmd_wq#22 ->(wq_completion)nfc3_nci_cmd_wq#565 ->(wq_completion)nfc2_nci_cmd_wq#1266 ->(wq_completion)nfc2_nci_cmd_wq#1267 ->(wq_completion)nfc3_nci_cmd_wq#566 ->(wq_completion)nfc2_nci_cmd_wq#1268 ->(wq_completion)nfc4_nci_cmd_wq#193 ->(wq_completion)nfc6_nci_cmd_wq#23 ->(wq_completion)nfc2_nci_cmd_wq#1269 ->(wq_completion)nfc3_nci_cmd_wq#567 ->(wq_completion)nfc5_nci_cmd_wq#44 ->(wq_completion)nfc2_nci_cmd_wq#1270 ->(wq_completion)nfc3_nci_cmd_wq#568 ->(wq_completion)nfc2_nci_cmd_wq#1271 ->(wq_completion)nfc2_nci_cmd_wq#1272 ->(wq_completion)nfc2_nci_cmd_wq#1273 ->(wq_completion)nfc3_nci_cmd_wq#569 ->(wq_completion)nfc2_nci_cmd_wq#1274 ->(wq_completion)nfc2_nci_cmd_wq#1275 
->(wq_completion)nfc3_nci_cmd_wq#570 ->(wq_completion)nfc2_nci_cmd_wq#1276 ->(wq_completion)nfc3_nci_cmd_wq#571 ->(wq_completion)nfc2_nci_cmd_wq#1277 ->(wq_completion)nfc4_nci_cmd_wq#194 ->(wq_completion)nfc2_nci_cmd_wq#1278 ->(wq_completion)nfc3_nci_cmd_wq#572 ->(wq_completion)nfc4_nci_cmd_wq#195 ->(wq_completion)nfc2_nci_cmd_wq#1279 ->(wq_completion)nfc3_nci_cmd_wq#573 ->(wq_completion)nfc2_nci_cmd_wq#1280 ->(wq_completion)nfc2_nci_cmd_wq#1281 ->(wq_completion)nfc3_nci_cmd_wq#574 ->(wq_completion)nfc2_nci_cmd_wq#1282 ->(wq_completion)nfc3_nci_cmd_wq#575 ->(wq_completion)nfc2_nci_cmd_wq#1283 ->(wq_completion)nfc2_nci_cmd_wq#1284 ->(wq_completion)nfc3_nci_cmd_wq#576 ->(wq_completion)nfc2_nci_cmd_wq#1285 ->(wq_completion)nfc2_nci_cmd_wq#1286 ->(wq_completion)nfc2_nci_cmd_wq#1287 ->(wq_completion)nfc3_nci_cmd_wq#577 ->(wq_completion)nfc2_nci_cmd_wq#1288 ->(wq_completion)nfc3_nci_cmd_wq#578 ->(wq_completion)nfc2_nci_cmd_wq#1289 ->(wq_completion)nfc3_nci_cmd_wq#579 ->(wq_completion)nfc2_nci_cmd_wq#1290 ->(wq_completion)nfc4_nci_cmd_wq#196 ->(wq_completion)nfc3_nci_cmd_wq#580 ->(wq_completion)nfc2_nci_cmd_wq#1291 ->(wq_completion)nfc3_nci_cmd_wq#582 ->(wq_completion)nfc2_nci_cmd_wq#1292 ->(wq_completion)nfc4_nci_cmd_wq#197 ->(wq_completion)nfc2_nci_cmd_wq#1293 ->(wq_completion)nfc3_nci_cmd_wq#583 ->(wq_completion)nfc4_nci_cmd_wq#198 ->(wq_completion)nfc3_nci_cmd_wq#584 ->(wq_completion)nfc2_nci_cmd_wq#1294 ->(wq_completion)nfc3_nci_cmd_wq#585 ->(wq_completion)nfc2_nci_cmd_wq#1296 ->(wq_completion)nfc3_nci_cmd_wq#587 ->(wq_completion)nfc2_nci_cmd_wq#1297 ->(wq_completion)nfc2_nci_cmd_wq#1298 ->(wq_completion)nfc3_nci_cmd_wq#588 ->(wq_completion)nfc4_nci_cmd_wq#199 ->(wq_completion)nfc2_nci_cmd_wq#1299 ->(wq_completion)nfc2_nci_cmd_wq#1300 ->(wq_completion)nfc4_nci_cmd_wq#200 ->(wq_completion)nfc5_nci_cmd_wq#45 ->(wq_completion)nfc3_nci_cmd_wq#589 ->(wq_completion)nfc2_nci_cmd_wq#1301 ->(wq_completion)nfc3_nci_cmd_wq#590 ->(wq_completion)nfc2_nci_cmd_wq#1302 ->(wq_completion)nfc4_nci_cmd_wq#201 ->(wq_completion)nfc2_nci_cmd_wq#1303 ->(wq_completion)nfc3_nci_cmd_wq#591 ->(wq_completion)nfc2_nci_cmd_wq#1304 ->(wq_completion)nfc3_nci_cmd_wq#592 ->(wq_completion)nfc2_nci_cmd_wq#1305 ->(wq_completion)nfc4_nci_cmd_wq#202 ->(wq_completion)nfc2_nci_cmd_wq#1306 ->(wq_completion)nfc2_nci_cmd_wq#1307 ->(wq_completion)nfc3_nci_cmd_wq#593 ->(wq_completion)nfc4_nci_cmd_wq#203 ->(wq_completion)nfc2_nci_cmd_wq#1308 ->(wq_completion)nfc5_nci_cmd_wq#46 ->(wq_completion)nfc3_nci_cmd_wq#594 ->(wq_completion)nfc4_nci_cmd_wq#204 ->(wq_completion)nfc2_nci_cmd_wq#1309 ->(wq_completion)nfc5_nci_cmd_wq#47 ->(wq_completion)nfc3_nci_cmd_wq#595 ->(wq_completion)nfc4_nci_cmd_wq#205 ->(wq_completion)nfc2_nci_cmd_wq#1310 ->(wq_completion)nfc3_nci_cmd_wq#596 ->(wq_completion)nfc2_nci_cmd_wq#1311 ->(wq_completion)nfc4_nci_cmd_wq#206 ->(wq_completion)nfc6_nci_cmd_wq#24 ->(wq_completion)nfc3_nci_cmd_wq#597 ->(wq_completion)nfc2_nci_cmd_wq#1312 ->(wq_completion)nfc5_nci_cmd_wq#48 ->(wq_completion)nfc2_nci_cmd_wq#1313 ->(wq_completion)nfc3_nci_cmd_wq#598 ->(wq_completion)nfc3_nci_cmd_wq#599 ->(wq_completion)nfc2_nci_cmd_wq#1314 ->(wq_completion)nfc4_nci_cmd_wq#207 ->(wq_completion)nfc5_nci_cmd_wq#49 ->(wq_completion)nfc2_nci_cmd_wq#1315 ->(wq_completion)nfc3_nci_cmd_wq#600 ->(wq_completion)nfc2_nci_cmd_wq#1316 ->(wq_completion)nfc4_nci_cmd_wq#208 ->(wq_completion)nfc2_nci_cmd_wq#1317 ->(wq_completion)nfc3_nci_cmd_wq#601 ->(wq_completion)nfc2_nci_cmd_wq#1318 ->(wq_completion)nfc3_nci_cmd_wq#602 ->(wq_completion)nfc2_nci_cmd_wq#1319 
->(wq_completion)nfc3_nci_cmd_wq#603 ->(wq_completion)nfc4_nci_cmd_wq#209 ->(wq_completion)nfc2_nci_cmd_wq#1320 ->(wq_completion)nfc2_nci_cmd_wq#1321 ->(wq_completion)nfc3_nci_cmd_wq#604 ->(wq_completion)nfc3_nci_cmd_wq#605 ->(wq_completion)nfc2_nci_cmd_wq#1322 ->(wq_completion)nfc2_nci_cmd_wq#1323 ->(wq_completion)nfc2_nci_cmd_wq#1324 ->(wq_completion)nfc3_nci_cmd_wq#606 ->(wq_completion)nfc2_nci_cmd_wq#1325 ->(wq_completion)nfc4_nci_cmd_wq#210 ->(wq_completion)nfc2_nci_cmd_wq#1326 ->(wq_completion)nfc3_nci_cmd_wq#607 ->(wq_completion)nfc2_nci_cmd_wq#1327 ->(wq_completion)nfc3_nci_cmd_wq#608 ->(wq_completion)nfc2_nci_cmd_wq#1328 ->(wq_completion)nfc3_nci_cmd_wq#609 ->(wq_completion)nfc4_nci_cmd_wq#211 ->(wq_completion)nfc2_nci_cmd_wq#1330 ->(wq_completion)nfc3_nci_cmd_wq#610 ->(wq_completion)nfc4_nci_cmd_wq#212 ->(wq_completion)nfc3_nci_cmd_wq#611 ->(wq_completion)nfc2_nci_cmd_wq#1331 ->(wq_completion)nfc4_nci_cmd_wq#213 ->(wq_completion)nfc2_nci_cmd_wq#1332 ->(wq_completion)nfc3_nci_cmd_wq#612 ->(wq_completion)nfc4_nci_cmd_wq#214 ->(wq_completion)nfc2_nci_cmd_wq#1333 ->(wq_completion)nfc3_nci_cmd_wq#613 ->(wq_completion)nfc3_nci_cmd_wq#614 ->(wq_completion)nfc2_nci_cmd_wq#1335 ->(wq_completion)nfc4_nci_cmd_wq#215 ->(wq_completion)nfc5_nci_cmd_wq#50 ->(wq_completion)nfc6_nci_cmd_wq#25 ->(wq_completion)nfc2_nci_cmd_wq#1336 ->(wq_completion)nfc3_nci_cmd_wq#615 ->(wq_completion)nfc2_nci_cmd_wq#1337 ->(wq_completion)nfc4_nci_cmd_wq#216 ->(wq_completion)nfc2_nci_cmd_wq#1338 ->(wq_completion)nfc2_nci_cmd_wq#1339 ->(wq_completion)nfc3_nci_cmd_wq#616 ->(wq_completion)nfc4_nci_cmd_wq#217 ->(wq_completion)nfc2_nci_cmd_wq#1340 ->(wq_completion)nfc5_nci_cmd_wq#51 ->(wq_completion)nfc2_nci_cmd_wq#1341 ->(wq_completion)nfc9_nci_cmd_wq#12 ->(wq_completion)nfc10_nci_cmd_wq#10 ->(wq_completion)nfc9_nci_cmd_wq#13 ->(wq_completion)nfc17_nci_cmd_wq#11 ->(wq_completion)nfc18_nci_cmd_wq#7 ->(wq_completion)nfc18_nci_cmd_wq#8 ->(wq_completion)nfc29_nci_cmd_wq#6 ->(wq_completion)nfc30_nci_cmd_wq#6 ->(wq_completion)nfc28_nci_cmd_wq#10 ->(wq_completion)nfc35_nci_cmd_wq#5 ->(wq_completion)nfc37_nci_cmd_wq#6 ->(wq_completion)nfc3_nci_cmd_wq#617 ->(wq_completion)nfc2_nci_cmd_wq#1342 ->(wq_completion)nfc36_nci_cmd_wq#7 ->(wq_completion)nfc4_nci_cmd_wq#218 ->(wq_completion)nfc30_nci_cmd_wq#7 ->(wq_completion)nfc5_nci_cmd_wq#52 ->(wq_completion)nfc6_nci_cmd_wq#26 ->(wq_completion)nfc7_nci_cmd_wq#18 ->(wq_completion)nfc8_nci_cmd_wq#11 ->(wq_completion)nfc11_nci_cmd_wq#10 ->(wq_completion)nfc10_nci_cmd_wq#11 ->(wq_completion)nfc9_nci_cmd_wq#14 ->(wq_completion)nfc4_nci_cmd_wq#219 ->(wq_completion)nfc12_nci_cmd_wq#12 ->(wq_completion)nfc13_nci_cmd_wq#11 ->(wq_completion)nfc14_nci_cmd_wq#10 ->(wq_completion)nfc15_nci_cmd_wq#8 ->(wq_completion)nfc16_nci_cmd_wq#9 ->(wq_completion)nfc5_nci_cmd_wq#53 ->(wq_completion)nfc17_nci_cmd_wq#12 ->(wq_completion)nfc19_nci_cmd_wq#8 ->(wq_completion)nfc8_nci_cmd_wq#12 ->(wq_completion)nfc20_nci_cmd_wq#6 ->(wq_completion)nfc7_nci_cmd_wq#19 ->(wq_completion)nfc21_nci_cmd_wq#7 ->(wq_completion)nfc18_nci_cmd_wq#9 ->(wq_completion)nfc22_nci_cmd_wq#8 ->(wq_completion)nfc23_nci_cmd_wq#6 ->(wq_completion)nfc24_nci_cmd_wq#6 ->(wq_completion)nfc25_nci_cmd_wq#7 ->(wq_completion)nfc26_nci_cmd_wq#8 ->(wq_completion)nfc27_nci_cmd_wq#6 ->(wq_completion)nfc31_nci_cmd_wq#7 ->(wq_completion)nfc3_nci_cmd_wq#618 ->(wq_completion)nfc2_nci_cmd_wq#1343 ->(wq_completion)nfc6_nci_cmd_wq#27 ->(wq_completion)nfc4_nci_cmd_wq#220 ->(wq_completion)nfc9_nci_cmd_wq#15 ->(wq_completion)nfc5_nci_cmd_wq#54 
->(wq_completion)nfc7_nci_cmd_wq#20 ->(wq_completion)nfc8_nci_cmd_wq#13 ->(wq_completion)nfc10_nci_cmd_wq#12 ->(wq_completion)nfc11_nci_cmd_wq#11 ->(wq_completion)nfc12_nci_cmd_wq#13 ->(wq_completion)nfc13_nci_cmd_wq#12 ->(wq_completion)nfc14_nci_cmd_wq#11 ->(wq_completion)nfc15_nci_cmd_wq#9 ->(wq_completion)nfc16_nci_cmd_wq#10 ->(wq_completion)nfc17_nci_cmd_wq#13 ->(wq_completion)nfc19_nci_cmd_wq#9 ->(wq_completion)nfc38_nci_cmd_wq#4 ->(wq_completion)nfc34_nci_cmd_wq#7 ->(wq_completion)nfc33_nci_cmd_wq#6 ->(wq_completion)nfc29_nci_cmd_wq#7 ->(wq_completion)nfc28_nci_cmd_wq#11 ->(wq_completion)nfc32_nci_cmd_wq#8 ->(wq_completion)nfc10_nci_cmd_wq#13 ->(wq_completion)nfc9_nci_cmd_wq#16 ->(wq_completion)nfc8_nci_cmd_wq#14 ->(wq_completion)nfc7_nci_cmd_wq#21 ->(wq_completion)nfc5_nci_cmd_wq#55 ->(wq_completion)nfc6_nci_cmd_wq#28 ->(wq_completion)nfc4_nci_cmd_wq#221 ->(wq_completion)nfc2_nci_cmd_wq#1344 ->(wq_completion)nfc3_nci_cmd_wq#619 ->(wq_completion)nfc18_nci_cmd_wq#10 ->(wq_completion)nfc2_nci_cmd_wq#1345 ->(wq_completion)nfc3_nci_cmd_wq#620 ->(wq_completion)nfc2_nci_cmd_wq#1346 ->(wq_completion)nfc3_nci_cmd_wq#621 ->(wq_completion)nfc2_nci_cmd_wq#1347 ->(wq_completion)nfc4_nci_cmd_wq#222 ->(wq_completion)nfc3_nci_cmd_wq#622 ->(wq_completion)nfc4_nci_cmd_wq#223 ->(wq_completion)nfc2_nci_cmd_wq#1348 ->(wq_completion)nfc3_nci_cmd_wq#623 ->(wq_completion)nfc4_nci_cmd_wq#224 ->(wq_completion)nfc2_nci_cmd_wq#1349 ->(wq_completion)nfc3_nci_cmd_wq#624 ->(wq_completion)nfc2_nci_cmd_wq#1350 ->(wq_completion)nfc4_nci_cmd_wq#225 ->(wq_completion)nfc3_nci_cmd_wq#625 ->(wq_completion)nfc5_nci_cmd_wq#56 ->(wq_completion)nfc2_nci_cmd_wq#1351 ->(wq_completion)nfc3_nci_cmd_wq#626 ->(wq_completion)nfc3_nci_cmd_wq#627 ->(wq_completion)nfc4_nci_cmd_wq#226 ->(wq_completion)nfc5_nci_cmd_wq#57 ->(wq_completion)nfc2_nci_cmd_wq#1352 ->(wq_completion)nfc2_nci_cmd_wq#1353 ->(wq_completion)nfc3_nci_cmd_wq#629 ->(wq_completion)nfc2_nci_cmd_wq#1354 ->(wq_completion)nfc2_nci_cmd_wq#1355 ->(wq_completion)nfc3_nci_cmd_wq#630 ->(wq_completion)nfc4_nci_cmd_wq#227 ->(wq_completion)nfc2_nci_cmd_wq#1356 ->(wq_completion)nfc3_nci_cmd_wq#631 ->(wq_completion)nfc2_nci_cmd_wq#1357 ->(wq_completion)nfc3_nci_cmd_wq#632 ->(wq_completion)nfc2_nci_cmd_wq#1358 ->(wq_completion)nfc3_nci_cmd_wq#633 ->(wq_completion)nfc4_nci_cmd_wq#228 ->(wq_completion)nfc3_nci_cmd_wq#634 ->(wq_completion)nfc2_nci_cmd_wq#1359 ->(wq_completion)nfc3_nci_cmd_wq#635 ->(wq_completion)nfc2_nci_cmd_wq#1360 ->(wq_completion)nfc2_nci_cmd_wq#1361 ->(wq_completion)nfc3_nci_cmd_wq#636 ->(wq_completion)nfc2_nci_cmd_wq#1362 ->(wq_completion)nfc3_nci_cmd_wq#637 ->(wq_completion)nfc4_nci_cmd_wq#229 ->(wq_completion)nfc2_nci_cmd_wq#1363 ->(wq_completion)nfc3_nci_cmd_wq#638 ->(wq_completion)nfc2_nci_cmd_wq#1364 ->(wq_completion)nfc2_nci_cmd_wq#1365 ->(wq_completion)nfc4_nci_cmd_wq#230 ->(wq_completion)nfc3_nci_cmd_wq#639 ->(wq_completion)nfc2_nci_cmd_wq#1366 ->(wq_completion)nfc4_nci_cmd_wq#231 ->(wq_completion)nfc3_nci_cmd_wq#640 ->(wq_completion)nfc2_nci_cmd_wq#1367 ->(wq_completion)nfc2_nci_cmd_wq#1368 ->(wq_completion)nfc3_nci_cmd_wq#641 ->(wq_completion)nfc2_nci_cmd_wq#1369 ->(wq_completion)nfc2_nci_cmd_wq#1370 ->(wq_completion)nfc4_nci_cmd_wq#232 ->(wq_completion)nfc3_nci_cmd_wq#642 ->(wq_completion)nfc5_nci_cmd_wq#58 ->(wq_completion)nfc2_nci_cmd_wq#1371 ->(wq_completion)nfc3_nci_cmd_wq#643 ->(wq_completion)nfc2_nci_cmd_wq#1372 ->(wq_completion)nfc3_nci_cmd_wq#644 ->(wq_completion)nfc2_nci_cmd_wq#1373 ->(wq_completion)nfc3_nci_cmd_wq#645 
->(wq_completion)nfc4_nci_cmd_wq#233 ->(wq_completion)nfc2_nci_cmd_wq#1374 ->(wq_completion)nfc3_nci_cmd_wq#646 ->(wq_completion)nfc2_nci_cmd_wq#1375 ->(wq_completion)nfc2_nci_cmd_wq#1376 ->(wq_completion)nfc2_nci_cmd_wq#1377 ->(wq_completion)nfc2_nci_cmd_wq#1378 ->(wq_completion)nfc2_nci_cmd_wq#1379 ->(wq_completion)nfc3_nci_cmd_wq#647 ->(wq_completion)nfc4_nci_cmd_wq#234 ->(wq_completion)nfc2_nci_cmd_wq#1380 ->(wq_completion)nfc2_nci_cmd_wq#1381 ->(wq_completion)nfc4_nci_cmd_wq#235 ->(wq_completion)nfc3_nci_cmd_wq#648 ->(wq_completion)nfc2_nci_cmd_wq#1382 ->(wq_completion)nfc2_nci_cmd_wq#1383 ->(wq_completion)nfc3_nci_cmd_wq#649 ->(wq_completion)nfc4_nci_cmd_wq#236 ->(wq_completion)nfc2_nci_cmd_wq#1384 ->(wq_completion)nfc3_nci_cmd_wq#650 ->(wq_completion)nfc2_nci_cmd_wq#1385 ->(wq_completion)nfc4_nci_cmd_wq#237 ->(wq_completion)nfc2_nci_cmd_wq#1386 ->(wq_completion)nfc3_nci_cmd_wq#651 ->(wq_completion)nfc2_nci_cmd_wq#1387 ->(wq_completion)nfc2_nci_cmd_wq#1388 ->(wq_completion)nfc3_nci_cmd_wq#652 ->(wq_completion)nfc4_nci_cmd_wq#238 ->(wq_completion)nfc2_nci_cmd_wq#1389 ->(wq_completion)nfc3_nci_cmd_wq#653 ->(wq_completion)nfc2_nci_cmd_wq#1390 ->(wq_completion)nfc4_nci_cmd_wq#239 ->(wq_completion)nfc2_nci_cmd_wq#1391 ->(wq_completion)nfc3_nci_cmd_wq#654 ->(wq_completion)nfc2_nci_cmd_wq#1392 ->(wq_completion)nfc4_nci_cmd_wq#240 ->(wq_completion)nfc3_nci_cmd_wq#655 ->(wq_completion)nfc2_nci_cmd_wq#1393 ->(wq_completion)nfc3_nci_cmd_wq#656 ->(wq_completion)nfc2_nci_cmd_wq#1394 ->(wq_completion)nfc3_nci_cmd_wq#657 ->(wq_completion)nfc4_nci_cmd_wq#241 ->(wq_completion)nfc3_nci_cmd_wq#658 ->(wq_completion)nfc2_nci_cmd_wq#1395 ->(wq_completion)nfc5_nci_cmd_wq#60 ->(wq_completion)nfc2_nci_cmd_wq#1396 ->(wq_completion)nfc3_nci_cmd_wq#659 ->(wq_completion)nfc2_nci_cmd_wq#1397 ->(wq_completion)nfc2_nci_cmd_wq#1398 ->(wq_completion)nfc3_nci_cmd_wq#660 ->(wq_completion)nfc2_nci_cmd_wq#1399 ->(wq_completion)nfc3_nci_cmd_wq#661 ->(wq_completion)nfc4_nci_cmd_wq#242 ->(wq_completion)nfc2_nci_cmd_wq#1400 ->(wq_completion)nfc3_nci_cmd_wq#662 ->(wq_completion)nfc2_nci_cmd_wq#1401 ->(wq_completion)nfc3_nci_cmd_wq#663 ->(wq_completion)nfc2_nci_cmd_wq#1402 ->(wq_completion)nfc2_nci_cmd_wq#1403 ->(wq_completion)nfc3_nci_cmd_wq#664 ->(wq_completion)nfc4_nci_cmd_wq#243 ->(wq_completion)nfc2_nci_cmd_wq#1404 ->(wq_completion)nfc3_nci_cmd_wq#665 ->(wq_completion)nfc4_nci_cmd_wq#244 ->(wq_completion)nfc2_nci_cmd_wq#1405 ->(wq_completion)nfc3_nci_cmd_wq#666 ->(wq_completion)nfc2_nci_cmd_wq#1406 ->(wq_completion)nfc3_nci_cmd_wq#667 ->(wq_completion)nfc4_nci_cmd_wq#245 ->(wq_completion)nfc5_nci_cmd_wq#61 ->(wq_completion)nfc7_nci_cmd_wq#22 ->(wq_completion)nfc10_nci_cmd_wq#14 ->(wq_completion)nfc12_nci_cmd_wq#14 ->(wq_completion)nfc13_nci_cmd_wq#13 ->(wq_completion)nfc13_nci_cmd_wq#14 ->(wq_completion)nfc19_nci_cmd_wq#10 ->(wq_completion)nfc13_nci_cmd_wq#15 ->(wq_completion)nfc13_nci_cmd_wq#16 ->(wq_completion)nfc24_nci_cmd_wq#7 ->(wq_completion)nfc26_nci_cmd_wq#9 ->(wq_completion)nfc28_nci_cmd_wq#12 ->(wq_completion)nfc2_nci_cmd_wq#1407 ->(wq_completion)nfc3_nci_cmd_wq#668 ->(wq_completion)nfc6_nci_cmd_wq#29 ->(wq_completion)nfc4_nci_cmd_wq#246 ->(wq_completion)nfc8_nci_cmd_wq#15 ->(wq_completion)nfc9_nci_cmd_wq#17 ->(wq_completion)nfc5_nci_cmd_wq#62 ->(wq_completion)nfc7_nci_cmd_wq#23 ->(wq_completion)nfc11_nci_cmd_wq#12 ->(wq_completion)nfc10_nci_cmd_wq#15 ->(wq_completion)nfc12_nci_cmd_wq#15 ->(wq_completion)nfc14_nci_cmd_wq#12 ->(wq_completion)nfc15_nci_cmd_wq#10 ->(wq_completion)nfc16_nci_cmd_wq#11 
->(wq_completion)nfc17_nci_cmd_wq#14 ->(wq_completion)nfc18_nci_cmd_wq#11 ->(wq_completion)nfc20_nci_cmd_wq#7 ->(wq_completion)nfc21_nci_cmd_wq#8 ->(wq_completion)nfc22_nci_cmd_wq#9 ->(wq_completion)nfc19_nci_cmd_wq#11 ->(wq_completion)nfc23_nci_cmd_wq#7 ->(wq_completion)nfc13_nci_cmd_wq#17 ->(wq_completion)nfc25_nci_cmd_wq#8 ->(wq_completion)nfc24_nci_cmd_wq#8 ->(wq_completion)nfc27_nci_cmd_wq#7 ->(wq_completion)nfc26_nci_cmd_wq#10 ->(wq_completion)nfc34_nci_cmd_wq#8 ->(wq_completion)nfc33_nci_cmd_wq#7 ->(wq_completion)nfc32_nci_cmd_wq#9 ->(wq_completion)nfc31_nci_cmd_wq#8 ->(wq_completion)nfc30_nci_cmd_wq#8 ->(wq_completion)nfc28_nci_cmd_wq#13 ->(wq_completion)nfc29_nci_cmd_wq#8 ->(wq_completion)nfc2_nci_cmd_wq#1408 ->(wq_completion)nfc3_nci_cmd_wq#669 ->(wq_completion)nfc2_nci_cmd_wq#1409 ->(wq_completion)nfc3_nci_cmd_wq#670 ->(wq_completion)nfc3_nci_cmd_wq#671 ->(wq_completion)nfc2_nci_cmd_wq#1410 ->(wq_completion)nfc3_nci_cmd_wq#672 ->(wq_completion)nfc2_nci_cmd_wq#1411 ->(wq_completion)nfc3_nci_cmd_wq#673 ->(wq_completion)nfc2_nci_cmd_wq#1412 ->(wq_completion)nfc2_nci_cmd_wq#1413 ->(wq_completion)nfc2_nci_cmd_wq#1414 ->(wq_completion)nfc3_nci_cmd_wq#674 ->(wq_completion)nfc2_nci_cmd_wq#1415 ->(wq_completion)nfc3_nci_cmd_wq#675 ->(wq_completion)nfc3_nci_cmd_wq#676 ->(wq_completion)nfc2_nci_cmd_wq#1416 ->(wq_completion)nfc4_nci_cmd_wq#247 ->(wq_completion)nfc2_nci_cmd_wq#1417 ->(wq_completion)nfc2_nci_cmd_wq#1418 ->(wq_completion)nfc4_nci_cmd_wq#248 ->(wq_completion)nfc5_nci_cmd_wq#63 ->(wq_completion)nfc3_nci_cmd_wq#677 ->(wq_completion)nfc4_nci_cmd_wq#249 ->(wq_completion)nfc2_nci_cmd_wq#1419 ->(wq_completion)nfc3_nci_cmd_wq#678 ->(wq_completion)nfc2_nci_cmd_wq#1420 ->(wq_completion)nfc2_nci_cmd_wq#1421 ->(wq_completion)nfc3_nci_cmd_wq#679 ->(wq_completion)nfc2_nci_cmd_wq#1422 ->(wq_completion)nfc3_nci_cmd_wq#680 ->(wq_completion)nfc2_nci_cmd_wq#1423 ->(wq_completion)nfc2_nci_cmd_wq#1424 ->(wq_completion)nfc8_nci_cmd_wq#16 ->(wq_completion)nfc2_nci_cmd_wq#1425 ->(wq_completion)nfc11_nci_cmd_wq#13 ->(wq_completion)nfc2_nci_cmd_wq#1426 ->(wq_completion)nfc13_nci_cmd_wq#19 ->(wq_completion)nfc13_nci_cmd_wq#20 ->(wq_completion)nfc15_nci_cmd_wq#11 ->(wq_completion)nfc15_nci_cmd_wq#12 ->(wq_completion)nfc18_nci_cmd_wq#12 ->(wq_completion)nfc20_nci_cmd_wq#8 ->(wq_completion)nfc22_nci_cmd_wq#10 ->(wq_completion)nfc23_nci_cmd_wq#8 ->(wq_completion)nfc3_nci_cmd_wq#681 ->(wq_completion)nfc4_nci_cmd_wq#250 ->(wq_completion)nfc5_nci_cmd_wq#64 ->(wq_completion)nfc6_nci_cmd_wq#30 ->(wq_completion)nfc7_nci_cmd_wq#24 ->(wq_completion)nfc9_nci_cmd_wq#18 ->(wq_completion)nfc8_nci_cmd_wq#17 ->(wq_completion)nfc10_nci_cmd_wq#16 ->(wq_completion)nfc12_nci_cmd_wq#16 ->(wq_completion)nfc11_nci_cmd_wq#14 ->(wq_completion)nfc14_nci_cmd_wq#13 ->(wq_completion)nfc2_nci_cmd_wq#1427 ->(wq_completion)nfc13_nci_cmd_wq#21 ->(wq_completion)nfc16_nci_cmd_wq#12 ->(wq_completion)nfc17_nci_cmd_wq#15 ->(wq_completion)nfc15_nci_cmd_wq#13 ->(wq_completion)nfc19_nci_cmd_wq#12 ->(wq_completion)nfc18_nci_cmd_wq#13 ->(wq_completion)nfc20_nci_cmd_wq#9 ->(wq_completion)nfc21_nci_cmd_wq#11 ->(wq_completion)nfc22_nci_cmd_wq#11 ->(wq_completion)nfc24_nci_cmd_wq#9 ->(wq_completion)nfc25_nci_cmd_wq#9 ->(wq_completion)nfc26_nci_cmd_wq#11 ->(wq_completion)nfc27_nci_cmd_wq#8 ->(wq_completion)nfc23_nci_cmd_wq#9 ->(wq_completion)nfc28_nci_cmd_wq#14 ->(wq_completion)nfc42_nci_cmd_wq#2 ->(wq_completion)nfc3_nci_cmd_wq#682 ->(wq_completion)nfc4_nci_cmd_wq#251 ->(wq_completion)nfc5_nci_cmd_wq#65 ->(wq_completion)nfc6_nci_cmd_wq#31 
->(wq_completion)nfc7_nci_cmd_wq#25 ->(wq_completion)nfc9_nci_cmd_wq#19 ->(wq_completion)nfc8_nci_cmd_wq#18 ->(wq_completion)nfc10_nci_cmd_wq#17 ->(wq_completion)nfc12_nci_cmd_wq#17 ->(wq_completion)nfc11_nci_cmd_wq#15 ->(wq_completion)nfc14_nci_cmd_wq#14 ->(wq_completion)nfc2_nci_cmd_wq#1428 ->(wq_completion)nfc13_nci_cmd_wq#22 ->(wq_completion)nfc16_nci_cmd_wq#13 ->(wq_completion)nfc15_nci_cmd_wq#14 ->(wq_completion)nfc17_nci_cmd_wq#16 ->(wq_completion)nfc50_nci_cmd_wq ->(wq_completion)nfc49_nci_cmd_wq ->(wq_completion)nfc48_nci_cmd_wq ->(wq_completion)nfc47_nci_cmd_wq ->(wq_completion)nfc46_nci_cmd_wq ->(wq_completion)nfc45_nci_cmd_wq ->(wq_completion)nfc44_nci_cmd_wq ->(wq_completion)nfc43_nci_cmd_wq ->(wq_completion)nfc41_nci_cmd_wq#2 ->(wq_completion)nfc40_nci_cmd_wq#3 ->(wq_completion)nfc39_nci_cmd_wq#4 ->(wq_completion)nfc38_nci_cmd_wq#5 ->(wq_completion)nfc37_nci_cmd_wq#7 ->(wq_completion)nfc36_nci_cmd_wq#8 ->(wq_completion)nfc35_nci_cmd_wq#6 ->(wq_completion)nfc34_nci_cmd_wq#9 ->(wq_completion)nfc33_nci_cmd_wq#8 ->(wq_completion)nfc32_nci_cmd_wq#10 ->(wq_completion)nfc31_nci_cmd_wq#9 ->(wq_completion)nfc30_nci_cmd_wq#9 ->(wq_completion)nfc29_nci_cmd_wq#9 ->(wq_completion)nfc4_nci_cmd_wq#252 ->(wq_completion)nfc5_nci_cmd_wq#66 ->(wq_completion)nfc2_nci_cmd_wq#1429 ->(wq_completion)nfc3_nci_cmd_wq#683 ->(wq_completion)nfc4_nci_cmd_wq#253 ->(wq_completion)nfc2_nci_cmd_wq#1430 ->(wq_completion)nfc3_nci_cmd_wq#684 ->(wq_completion)nfc4_nci_cmd_wq#254 ->(wq_completion)nfc5_nci_cmd_wq#67 ->(wq_completion)nfc2_nci_cmd_wq#1431 ->(wq_completion)nfc3_nci_cmd_wq#685 ->(wq_completion)nfc6_nci_cmd_wq#32 ->(wq_completion)nfc4_nci_cmd_wq#255 ->(wq_completion)nfc2_nci_cmd_wq#1432 ->(wq_completion)nfc2_nci_cmd_wq#1433 ->(wq_completion)nfc3_nci_cmd_wq#686 ->(wq_completion)nfc2_nci_cmd_wq#1434 ->(wq_completion)nfc3_nci_cmd_wq#687 ->(wq_completion)nfc2_nci_cmd_wq#1435 ->(wq_completion)nfc2_nci_cmd_wq#1436 ->(wq_completion)nfc3_nci_cmd_wq#688 ->(wq_completion)nfc4_nci_cmd_wq#256 ->(wq_completion)nfc3_nci_cmd_wq#689 ->(wq_completion)nfc2_nci_cmd_wq#1437 ->(wq_completion)nfc4_nci_cmd_wq#257 ->(wq_completion)nfc2_nci_cmd_wq#1438 ->(wq_completion)nfc5_nci_cmd_wq#68 ->(wq_completion)nfc2_nci_cmd_wq#1439 ->(wq_completion)nfc3_nci_cmd_wq#690 ->(wq_completion)nfc3_nci_cmd_wq#691 ->(wq_completion)nfc2_nci_cmd_wq#1440 ->(wq_completion)nfc3_nci_cmd_wq#692 ->(wq_completion)nfc2_nci_cmd_wq#1441 ->(wq_completion)nfc3_nci_cmd_wq#693 ->(wq_completion)nfc4_nci_cmd_wq#258 ->(wq_completion)nfc2_nci_cmd_wq#1442 ->(wq_completion)nfc3_nci_cmd_wq#694 ->(wq_completion)nfc2_nci_cmd_wq#1443 ->(wq_completion)nfc2_nci_cmd_wq#1444 ->(wq_completion)nfc3_nci_cmd_wq#695 ->(wq_completion)nfc4_nci_cmd_wq#259 ->(wq_completion)nfc3_nci_cmd_wq#696 ->(wq_completion)nfc5_nci_cmd_wq#69 ->(wq_completion)nfc2_nci_cmd_wq#1445 ->(wq_completion)nfc4_nci_cmd_wq#260 ->(wq_completion)nfc3_nci_cmd_wq#697 ->(wq_completion)nfc2_nci_cmd_wq#1446 ->(wq_completion)nfc2_nci_cmd_wq#1447 ->(wq_completion)nfc3_nci_cmd_wq#698 ->(wq_completion)nfc4_nci_cmd_wq#261 ->(wq_completion)nfc5_nci_cmd_wq#70 ->(wq_completion)nfc2_nci_cmd_wq#1448 ->(wq_completion)nfc3_nci_cmd_wq#699 ->(wq_completion)nfc4_nci_cmd_wq#262 ->(wq_completion)nfc2_nci_cmd_wq#1449 ->(wq_completion)nfc3_nci_cmd_wq#700 ->(wq_completion)nfc4_nci_cmd_wq#263 ->(wq_completion)nfc2_nci_cmd_wq#1450 ->(wq_completion)nfc3_nci_cmd_wq#701 ->(wq_completion)nfc5_nci_cmd_wq#71 ->(wq_completion)nfc4_nci_cmd_wq#264 ->(wq_completion)nfc2_nci_cmd_wq#1451 ->(wq_completion)nfc3_nci_cmd_wq#702 
->(wq_completion)nfc3_nci_cmd_wq#703 ->(wq_completion)nfc4_nci_cmd_wq#265 ->(wq_completion)nfc2_nci_cmd_wq#1452 ->(wq_completion)nfc5_nci_cmd_wq#72 ->(wq_completion)nfc2_nci_cmd_wq#1453 ->(wq_completion)nfc4_nci_cmd_wq#266 ->(wq_completion)nfc2_nci_cmd_wq#1454 ->(wq_completion)nfc4_nci_cmd_wq#267 ->(wq_completion)nfc3_nci_cmd_wq#704 ->(wq_completion)nfc2_nci_cmd_wq#1455 ->(wq_completion)nfc4_nci_cmd_wq#268 ->(wq_completion)nfc3_nci_cmd_wq#705 ->(wq_completion)nfc2_nci_cmd_wq#1456 ->(wq_completion)nfc3_nci_cmd_wq#706 ->(wq_completion)nfc4_nci_cmd_wq#269 ->(wq_completion)nfc2_nci_cmd_wq#1457 ->(wq_completion)nfc3_nci_cmd_wq#707 ->(wq_completion)nfc4_nci_cmd_wq#270 ->(wq_completion)nfc2_nci_cmd_wq#1458 ->(wq_completion)nfc3_nci_cmd_wq#708 ->(wq_completion)nfc5_nci_cmd_wq#73 ->(wq_completion)nfc4_nci_cmd_wq#272 ->(wq_completion)nfc2_nci_cmd_wq#1459 ->(wq_completion)nfc3_nci_cmd_wq#709 ->(wq_completion)nfc2_nci_cmd_wq#1460 ->(wq_completion)nfc2_nci_cmd_wq#1461 ->(wq_completion)nfc3_nci_cmd_wq#710 ->(wq_completion)nfc2_nci_cmd_wq#1462 ->(wq_completion)nfc3_nci_cmd_wq#711 ->(wq_completion)nfc2_nci_cmd_wq#1463 ->(wq_completion)nfc2_nci_cmd_wq#1464 ->(wq_completion)nfc3_nci_cmd_wq#712 ->(wq_completion)nfc2_nci_cmd_wq#1465 ->(wq_completion)nfc3_nci_cmd_wq#713 ->(wq_completion)nfc2_nci_cmd_wq#1466 ->(wq_completion)nfc3_nci_cmd_wq#714 ->(wq_completion)nfc4_nci_cmd_wq#273 ->(wq_completion)nfc2_nci_cmd_wq#1467 ->(wq_completion)nfc3_nci_cmd_wq#715 ->(wq_completion)nfc2_nci_cmd_wq#1468 ->(wq_completion)nfc3_nci_cmd_wq#716 ->(wq_completion)nfc4_nci_cmd_wq#274 ->(wq_completion)nfc2_nci_cmd_wq#1469 ->(wq_completion)nfc2_nci_cmd_wq#1470 ->(wq_completion)nfc2_nci_cmd_wq#1471 ->(wq_completion)nfc3_nci_cmd_wq#717 ->(wq_completion)nfc2_nci_cmd_wq#1472 ->(wq_completion)nfc3_nci_cmd_wq#718 ->(wq_completion)nfc2_nci_cmd_wq#1473 ->(wq_completion)nfc3_nci_cmd_wq#719 ->(wq_completion)nfc2_nci_cmd_wq#1474 ->(wq_completion)nfc4_nci_cmd_wq#275 ->(wq_completion)nfc5_nci_cmd_wq#74 ->(wq_completion)nfc11_nci_cmd_wq#16 ->(wq_completion)nfc12_nci_cmd_wq#18 ->(wq_completion)nfc11_nci_cmd_wq#17 ->(wq_completion)nfc17_nci_cmd_wq#17 ->(wq_completion)nfc18_nci_cmd_wq#14 ->(wq_completion)nfc17_nci_cmd_wq#18 ->(wq_completion)nfc22_nci_cmd_wq#12 ->(wq_completion)nfc23_nci_cmd_wq#10 ->(wq_completion)nfc26_nci_cmd_wq#12 ->(wq_completion)nfc27_nci_cmd_wq#9 ->(wq_completion)nfc30_nci_cmd_wq#10 ->(wq_completion)nfc2_nci_cmd_wq#1475 ->(wq_completion)nfc3_nci_cmd_wq#720 ->(wq_completion)nfc6_nci_cmd_wq#33 ->(wq_completion)nfc4_nci_cmd_wq#276 ->(wq_completion)nfc2_nci_cmd_wq#1476 ->(wq_completion)nfc5_nci_cmd_wq#75 ->(wq_completion)nfc7_nci_cmd_wq#26 ->(wq_completion)nfc8_nci_cmd_wq#19 ->(wq_completion)nfc9_nci_cmd_wq#20 ->(wq_completion)nfc10_nci_cmd_wq#18 ->(wq_completion)nfc13_nci_cmd_wq#23 ->(wq_completion)nfc14_nci_cmd_wq#15 ->(wq_completion)nfc12_nci_cmd_wq#19 ->(wq_completion)nfc11_nci_cmd_wq#18 ->(wq_completion)nfc15_nci_cmd_wq#15 ->(wq_completion)nfc16_nci_cmd_wq#14 ->(wq_completion)nfc19_nci_cmd_wq#13 ->(wq_completion)nfc18_nci_cmd_wq#15 ->(wq_completion)nfc17_nci_cmd_wq#19 ->(wq_completion)nfc20_nci_cmd_wq#10 ->(wq_completion)nfc21_nci_cmd_wq#12 ->(wq_completion)nfc24_nci_cmd_wq#10 ->(wq_completion)nfc22_nci_cmd_wq#13 ->(wq_completion)nfc23_nci_cmd_wq#11 ->(wq_completion)nfc25_nci_cmd_wq#10 ->(wq_completion)nfc28_nci_cmd_wq#15 ->(wq_completion)nfc29_nci_cmd_wq#10 ->(wq_completion)nfc26_nci_cmd_wq#13 ->(wq_completion)nfc32_nci_cmd_wq#11 ->(wq_completion)nfc3_nci_cmd_wq#721 ->(wq_completion)nfc4_nci_cmd_wq#277 
->(wq_completion)nfc2_nci_cmd_wq#1477 ->(wq_completion)nfc5_nci_cmd_wq#76 ->(wq_completion)nfc6_nci_cmd_wq#34 ->(wq_completion)nfc7_nci_cmd_wq#27 ->(wq_completion)nfc8_nci_cmd_wq#20 ->(wq_completion)nfc9_nci_cmd_wq#21 ->(wq_completion)nfc10_nci_cmd_wq#19 ->(wq_completion)nfc11_nci_cmd_wq#19 ->(wq_completion)nfc12_nci_cmd_wq#20 ->(wq_completion)nfc13_nci_cmd_wq#24 ->(wq_completion)nfc14_nci_cmd_wq#16 ->(wq_completion)nfc15_nci_cmd_wq#16 ->(wq_completion)nfc16_nci_cmd_wq#15 ->(wq_completion)nfc17_nci_cmd_wq#20 ->(wq_completion)nfc18_nci_cmd_wq#16 ->(wq_completion)nfc19_nci_cmd_wq#14 ->(wq_completion)nfc31_nci_cmd_wq#10 ->(wq_completion)nfc27_nci_cmd_wq#10 ->(wq_completion)nfc13_nci_cmd_wq#25 ->(wq_completion)nfc12_nci_cmd_wq#21 ->(wq_completion)nfc11_nci_cmd_wq#20 ->(wq_completion)nfc10_nci_cmd_wq#20 ->(wq_completion)nfc9_nci_cmd_wq#22 ->(wq_completion)nfc8_nci_cmd_wq#21 ->(wq_completion)nfc7_nci_cmd_wq#28 ->(wq_completion)nfc6_nci_cmd_wq#35 ->(wq_completion)nfc5_nci_cmd_wq#77 ->(wq_completion)nfc2_nci_cmd_wq#1478 ->(wq_completion)nfc4_nci_cmd_wq#278 ->(wq_completion)nfc3_nci_cmd_wq#722 ->(wq_completion)nfc2_nci_cmd_wq#1479 ->(wq_completion)nfc3_nci_cmd_wq#723 ->(wq_completion)nfc2_nci_cmd_wq#1480 ->(wq_completion)nfc2_nci_cmd_wq#1481 ->(wq_completion)nfc3_nci_cmd_wq#724 ->(wq_completion)nfc2_nci_cmd_wq#1482 ->(wq_completion)nfc3_nci_cmd_wq#725 ->(wq_completion)nfc4_nci_cmd_wq#279 ->(wq_completion)nfc2_nci_cmd_wq#1483 ->(wq_completion)nfc2_nci_cmd_wq#1484 ->(wq_completion)nfc4_nci_cmd_wq#280 ->(wq_completion)nfc3_nci_cmd_wq#726 ->(wq_completion)nfc2_nci_cmd_wq#1485 ->(wq_completion)nfc2_nci_cmd_wq#1486 ->(wq_completion)nfc3_nci_cmd_wq#727 ->(wq_completion)nfc4_nci_cmd_wq#281 ->(wq_completion)nfc2_nci_cmd_wq#1487 ->(wq_completion)nfc2_nci_cmd_wq#1488 ->(wq_completion)nfc2_nci_cmd_wq#1489 ->(wq_completion)nfc3_nci_cmd_wq#729 ->(wq_completion)nfc2_nci_cmd_wq#1490 ->(wq_completion)nfc4_nci_cmd_wq#282 ->(wq_completion)nfc2_nci_cmd_wq#1491 ->(wq_completion)nfc3_nci_cmd_wq#730 ->(wq_completion)nfc2_nci_cmd_wq#1492 ->(wq_completion)nfc4_nci_cmd_wq#283 ->(wq_completion)nfc2_nci_cmd_wq#1493 ->(wq_completion)nfc3_nci_cmd_wq#731 ->(wq_completion)nfc4_nci_cmd_wq#284 ->(wq_completion)nfc2_nci_cmd_wq#1494 ->(wq_completion)nfc2_nci_cmd_wq#1495 ->(wq_completion)nfc3_nci_cmd_wq#732 ->(wq_completion)nfc2_nci_cmd_wq#1496 ->(wq_completion)nfc4_nci_cmd_wq#285 ->(wq_completion)nfc2_nci_cmd_wq#1497 ->(wq_completion)nfc3_nci_cmd_wq#733 ->(wq_completion)nfc2_nci_cmd_wq#1498 ->(wq_completion)nfc2_nci_cmd_wq#1499 ->(wq_completion)nfc2_nci_cmd_wq#1500 ->(wq_completion)nfc3_nci_cmd_wq#734 ->(wq_completion)nfc2_nci_cmd_wq#1501 ->(wq_completion)nfc2_nci_cmd_wq#1502 ->(wq_completion)nfc3_nci_cmd_wq#735 ->(wq_completion)nfc2_nci_cmd_wq#1503 ->(wq_completion)nfc4_nci_cmd_wq#286 ->(wq_completion)nfc3_nci_cmd_wq#736 ->(wq_completion)nfc2_nci_cmd_wq#1504 ->(wq_completion)nfc3_nci_cmd_wq#737 ->(wq_completion)nfc2_nci_cmd_wq#1505 ->(wq_completion)nfc4_nci_cmd_wq#287 ->(wq_completion)nfc2_nci_cmd_wq#1506 ->(wq_completion)nfc2_nci_cmd_wq#1507 ->(wq_completion)nfc3_nci_cmd_wq#738 ->(wq_completion)nfc3_nci_cmd_wq#739 ->(wq_completion)nfc2_nci_cmd_wq#1508 ->(wq_completion)nfc2_nci_cmd_wq#1509 ->(wq_completion)nfc3_nci_cmd_wq#740 ->(wq_completion)nfc4_nci_cmd_wq#288 ->(wq_completion)nfc2_nci_cmd_wq#1510 ->(wq_completion)nfc3_nci_cmd_wq#741 ->(wq_completion)nfc3_nci_cmd_wq#742 ->(wq_completion)nfc2_nci_cmd_wq#1511 ->(wq_completion)nfc4_nci_cmd_wq#289 ->(wq_completion)nfc2_nci_cmd_wq#1512 ->(wq_completion)nfc3_nci_cmd_wq#743 
->(wq_completion)nfc4_nci_cmd_wq#290 ->(wq_completion)nfc2_nci_cmd_wq#1513 ->(wq_completion)nfc3_nci_cmd_wq#744 ->(wq_completion)nfc2_nci_cmd_wq#1514 ->(wq_completion)nfc3_nci_cmd_wq#745 ->(wq_completion)nfc4_nci_cmd_wq#291 ->(wq_completion)nfc2_nci_cmd_wq#1515 ->(wq_completion)nfc2_nci_cmd_wq#1516 ->(wq_completion)nfc3_nci_cmd_wq#746 ->(wq_completion)nfc4_nci_cmd_wq#292 ->(wq_completion)nfc2_nci_cmd_wq#1517 ->(wq_completion)nfc3_nci_cmd_wq#747 ->(wq_completion)nfc2_nci_cmd_wq#1518 ->(wq_completion)nfc3_nci_cmd_wq#748 ->(wq_completion)nfc5_nci_cmd_wq#78 ->(wq_completion)nfc4_nci_cmd_wq#293 ->(wq_completion)nfc2_nci_cmd_wq#1519 ->(wq_completion)nfc3_nci_cmd_wq#749 ->(wq_completion)nfc2_nci_cmd_wq#1520 ->(wq_completion)nfc4_nci_cmd_wq#294 ->(wq_completion)nfc5_nci_cmd_wq#79 ->(wq_completion)nfc2_nci_cmd_wq#1521 ->(wq_completion)nfc3_nci_cmd_wq#750 ->(wq_completion)nfc2_nci_cmd_wq#1522 ->(wq_completion)nfc3_nci_cmd_wq#751 ->(wq_completion)nfc4_nci_cmd_wq#295 ->(wq_completion)nfc3_nci_cmd_wq#752 ->(wq_completion)nfc2_nci_cmd_wq#1523 ->(wq_completion)nfc4_nci_cmd_wq#296 ->(wq_completion)nfc5_nci_cmd_wq#80 ->(wq_completion)nfc2_nci_cmd_wq#1524 ->(wq_completion)nfc3_nci_cmd_wq#753 ->(wq_completion)nfc3_nci_cmd_wq#754 ->(wq_completion)nfc6_nci_cmd_wq#36 ->(wq_completion)nfc6_nci_cmd_wq#37 ->(wq_completion)nfc6_nci_cmd_wq#38 ->(wq_completion)nfc10_nci_cmd_wq#21 ->(wq_completion)nfc14_nci_cmd_wq#17 ->(wq_completion)nfc18_nci_cmd_wq#17 ->(wq_completion)nfc19_nci_cmd_wq#15 ->(wq_completion)nfc26_nci_cmd_wq#14 ->(wq_completion)nfc32_nci_cmd_wq#12 ->(wq_completion)nfc32_nci_cmd_wq#13 ->(wq_completion)nfc25_nci_cmd_wq#11 ->(wq_completion)nfc33_nci_cmd_wq#9 ->(wq_completion)nfc39_nci_cmd_wq#5 ->(wq_completion)nfc36_nci_cmd_wq#9 ->(wq_completion)nfc42_nci_cmd_wq#3 ->(wq_completion)nfc49_nci_cmd_wq#2 ->(wq_completion)nfc2_nci_cmd_wq#1525 ->(wq_completion)nfc4_nci_cmd_wq#297 ->(wq_completion)nfc5_nci_cmd_wq#81 ->(wq_completion)nfc3_nci_cmd_wq#755 ->(wq_completion)nfc7_nci_cmd_wq#29 ->(wq_completion)nfc8_nci_cmd_wq#22 ->(wq_completion)nfc3_nci_cmd_wq#757 ->(wq_completion)nfc6_nci_cmd_wq#39 ->(wq_completion)nfc11_nci_cmd_wq#21 ->(wq_completion)nfc12_nci_cmd_wq#22 ->(wq_completion)nfc15_nci_cmd_wq#17 ->(wq_completion)nfc2_nci_cmd_wq#1526 ->(wq_completion)nfc9_nci_cmd_wq#23 ->(wq_completion)nfc14_nci_cmd_wq#18 ->(wq_completion)nfc10_nci_cmd_wq#22 ->(wq_completion)nfc6_nci_cmd_wq#41 ->(wq_completion)nfc20_nci_cmd_wq#11 ->(wq_completion)nfc52_nci_cmd_wq ->(wq_completion)nfc21_nci_cmd_wq#13 ->(wq_completion)nfc5_nci_cmd_wq#82 ->(wq_completion)nfc19_nci_cmd_wq#16 ->(wq_completion)nfc6_nci_cmd_wq#42 ->(wq_completion)nfc23_nci_cmd_wq#12 ->(wq_completion)nfc51_nci_cmd_wq ->(wq_completion)nfc24_nci_cmd_wq#11 ->(wq_completion)nfc8_nci_cmd_wq#23 ->(wq_completion)nfc48_nci_cmd_wq#2 ->(wq_completion)nfc47_nci_cmd_wq#2 ->(wq_completion)nfc28_nci_cmd_wq#16 ->(wq_completion)nfc45_nci_cmd_wq#2 ->(wq_completion)nfc43_nci_cmd_wq#2 ->(wq_completion)nfc29_nci_cmd_wq#11 ->(wq_completion)nfc30_nci_cmd_wq#11 ->(wq_completion)nfc39_nci_cmd_wq#6 ->(wq_completion)nfc34_nci_cmd_wq#10 ->(wq_completion)nfc37_nci_cmd_wq#8 ->(wq_completion)nfc25_nci_cmd_wq#12 ->(wq_completion)nfc40_nci_cmd_wq#4 ->(wq_completion)nfc38_nci_cmd_wq#6 ->(wq_completion)nfc35_nci_cmd_wq#7 ->(wq_completion)nfc32_nci_cmd_wq#14 ->(wq_completion)nfc33_nci_cmd_wq#10 ->(wq_completion)nfc31_nci_cmd_wq#11 ->(wq_completion)nfc26_nci_cmd_wq#15 ->(wq_completion)nfc27_nci_cmd_wq#11 ->(wq_completion)nfc22_nci_cmd_wq#14 ->(wq_completion)nfc36_nci_cmd_wq#10 
->(wq_completion)nfc18_nci_cmd_wq#18 ->(wq_completion)nfc17_nci_cmd_wq#21 ->(wq_completion)nfc16_nci_cmd_wq#16 ->(wq_completion)nfc41_nci_cmd_wq#3 ->(wq_completion)nfc13_nci_cmd_wq#26 ->(wq_completion)nfc44_nci_cmd_wq#2 ->(wq_completion)nfc42_nci_cmd_wq#4 ->(wq_completion)nfc4_nci_cmd_wq#298 ->(wq_completion)nfc7_nci_cmd_wq#30 ->(wq_completion)nfc3_nci_cmd_wq#758 ->(wq_completion)nfc2_nci_cmd_wq#1528 ->(wq_completion)nfc49_nci_cmd_wq#3 ->(wq_completion)nfc50_nci_cmd_wq#2 ->(wq_completion)nfc46_nci_cmd_wq#2 ->(wq_completion)nfc2_nci_cmd_wq#1529 ->(wq_completion)nfc3_nci_cmd_wq#759 ->(wq_completion)nfc2_nci_cmd_wq#1530 ->(wq_completion)nfc3_nci_cmd_wq#760 ->(wq_completion)nfc4_nci_cmd_wq#299 ->(wq_completion)nfc2_nci_cmd_wq#1531 ->(wq_completion)nfc3_nci_cmd_wq#761 ->(wq_completion)nfc2_nci_cmd_wq#1532 ->(wq_completion)nfc3_nci_cmd_wq#762 ->(wq_completion)nfc4_nci_cmd_wq#300 ->(wq_completion)nfc2_nci_cmd_wq#1533 ->(wq_completion)nfc5_nci_cmd_wq#83 ->(wq_completion)nfc2_nci_cmd_wq#1534 ->(wq_completion)nfc3_nci_cmd_wq#763 ->(wq_completion)nfc4_nci_cmd_wq#301 ->(wq_completion)nfc2_nci_cmd_wq#1535 ->(wq_completion)nfc3_nci_cmd_wq#764 ->(wq_completion)nfc2_nci_cmd_wq#1536 ->(wq_completion)nfc3_nci_cmd_wq#765 ->(wq_completion)nfc2_nci_cmd_wq#1537 ->(wq_completion)nfc4_nci_cmd_wq#302 ->(wq_completion)nfc2_nci_cmd_wq#1538 ->(wq_completion)nfc2_nci_cmd_wq#1539 ->(wq_completion)nfc3_nci_cmd_wq#766 ->(wq_completion)nfc2_nci_cmd_wq#1540 ->(wq_completion)nfc3_nci_cmd_wq#767 ->(wq_completion)nfc4_nci_cmd_wq#303 ->(wq_completion)nfc5_nci_cmd_wq#84 ->(wq_completion)nfc3_nci_cmd_wq#768 ->(wq_completion)nfc2_nci_cmd_wq#1541 ->(wq_completion)nfc2_nci_cmd_wq#1542 ->(wq_completion)nfc2_nci_cmd_wq#1543 ->(wq_completion)nfc5_nci_cmd_wq#86 ->(wq_completion)nfc6_nci_cmd_wq#44 ->(wq_completion)nfc2_nci_cmd_wq#1544 ->(wq_completion)nfc4_nci_cmd_wq#304 ->(wq_completion)nfc3_nci_cmd_wq#769 ->(wq_completion)nfc5_nci_cmd_wq#87 ->(wq_completion)nfc2_nci_cmd_wq#1545 ->(wq_completion)nfc3_nci_cmd_wq#770 ->(wq_completion)nfc4_nci_cmd_wq#305 ->(wq_completion)nfc5_nci_cmd_wq#88 ->(wq_completion)nfc2_nci_cmd_wq#1546 ->(wq_completion)nfc3_nci_cmd_wq#771 ->(wq_completion)nfc5_nci_cmd_wq#89 ->(wq_completion)nfc4_nci_cmd_wq#306 ->(wq_completion)nfc2_nci_cmd_wq#1547 ->(wq_completion)nfc3_nci_cmd_wq#772 ->(wq_completion)nfc2_nci_cmd_wq#1548 ->(wq_completion)nfc4_nci_cmd_wq#307 ->(wq_completion)nfc3_nci_cmd_wq#773 ->(wq_completion)nfc3_nci_cmd_wq#774 ->(wq_completion)nfc4_nci_cmd_wq#308 ->(wq_completion)nfc2_nci_cmd_wq#1549 ->(wq_completion)nfc5_nci_cmd_wq#90 ->(wq_completion)nfc3_nci_cmd_wq#775 ->(wq_completion)nfc2_nci_cmd_wq#1550 ->(wq_completion)nfc4_nci_cmd_wq#309 ->(wq_completion)nfc3_nci_cmd_wq#776 ->(wq_completion)nfc2_nci_cmd_wq#1551 ->(wq_completion)nfc2_nci_cmd_wq#1552 ->(wq_completion)nfc3_nci_cmd_wq#777 ->(wq_completion)nfc2_nci_cmd_wq#1553 ->(wq_completion)nfc2_nci_cmd_wq#1554 ->(wq_completion)nfc2_nci_cmd_wq#1555 ->(wq_completion)nfc3_nci_cmd_wq#779 ->(wq_completion)nfc2_nci_cmd_wq#1556 ->(wq_completion)nfc4_nci_cmd_wq#310 ->(wq_completion)nfc2_nci_cmd_wq#1557 ->(wq_completion)nfc2_nci_cmd_wq#1558 ->(wq_completion)nfc2_nci_cmd_wq#1559 ->(wq_completion)nfc3_nci_cmd_wq#780 ->(wq_completion)nfc2_nci_cmd_wq#1560 ->(wq_completion)nfc2_nci_cmd_wq#1561 ->(wq_completion)nfc2_nci_cmd_wq#1562 ->(wq_completion)nfc2_nci_cmd_wq#1563 ->(wq_completion)nfc3_nci_cmd_wq#781 ->(wq_completion)nfc2_nci_cmd_wq#1564 ->(wq_completion)nfc4_nci_cmd_wq#311 ->(wq_completion)nfc2_nci_cmd_wq#1565 ->(wq_completion)nfc3_nci_cmd_wq#782 
->(wq_completion)nfc2_nci_cmd_wq#1566 ->(wq_completion)nfc3_nci_cmd_wq#783 ->(wq_completion)nfc2_nci_cmd_wq#1567 ->(wq_completion)nfc3_nci_cmd_wq#784 ->(wq_completion)nfc2_nci_cmd_wq#1568 ->(wq_completion)nfc4_nci_cmd_wq#312 ->(wq_completion)nfc3_nci_cmd_wq#785 ->(wq_completion)nfc2_nci_cmd_wq#1569 ->(wq_completion)nfc2_nci_cmd_wq#1570 ->(wq_completion)nfc2_nci_cmd_wq#1571 ->(wq_completion)nfc2_nci_cmd_wq#1572 ->(wq_completion)nfc2_nci_cmd_wq#1573 ->(wq_completion)nfc2_nci_cmd_wq#1574 ->(wq_completion)nfc2_nci_cmd_wq#1575 ->(wq_completion)nfc2_nci_cmd_wq#1576 ->(wq_completion)nfc2_nci_cmd_wq#1577 ->(wq_completion)nfc2_nci_cmd_wq#1578 ->(wq_completion)nfc2_nci_cmd_wq#1579 ->(wq_completion)nfc2_nci_cmd_wq#1580 ->(wq_completion)nfc2_nci_cmd_wq#1581 ->(wq_completion)nfc2_nci_cmd_wq#1582 ->(wq_completion)nfc2_nci_cmd_wq#1583 ->(wq_completion)nfc2_nci_cmd_wq#1584 ->(wq_completion)nfc2_nci_cmd_wq#1585 ->(wq_completion)nfc2_nci_cmd_wq#1586 ->(wq_completion)nfc2_nci_cmd_wq#1587 ->(wq_completion)nfc2_nci_cmd_wq#1588 ->(wq_completion)nfc2_nci_cmd_wq#1589 ->(wq_completion)nfc2_nci_cmd_wq#1590 ->(wq_completion)nfc2_nci_cmd_wq#1591 ->(wq_completion)nfc2_nci_cmd_wq#1592 ->(wq_completion)nfc2_nci_cmd_wq#1593 ->(wq_completion)nfc2_nci_cmd_wq#1594 ->(wq_completion)nfc2_nci_cmd_wq#1595 ->(wq_completion)nfc2_nci_cmd_wq#1596 ->(wq_completion)nfc2_nci_cmd_wq#1597 ->(wq_completion)nfc2_nci_cmd_wq#1598 ->(wq_completion)nfc2_nci_cmd_wq#1599 ->(wq_completion)nfc2_nci_cmd_wq#1600 ->(wq_completion)nfc2_nci_cmd_wq#1601 ->(wq_completion)nfc2_nci_cmd_wq#1602 ->(wq_completion)nfc3_nci_cmd_wq#786 ->(wq_completion)nfc2_nci_cmd_wq#1603 ->(wq_completion)nfc2_nci_cmd_wq#1604 ->(wq_completion)nfc2_nci_cmd_wq#1605 ->(wq_completion)nfc2_nci_cmd_wq#1606 ->(wq_completion)nfc3_nci_cmd_wq#787 ->(wq_completion)nfc2_nci_cmd_wq#1607 ->(wq_completion)nfc3_nci_cmd_wq#788 ->(wq_completion)nfc4_nci_cmd_wq#313 ->(wq_completion)nfc3_nci_cmd_wq#789 ->(wq_completion)nfc2_nci_cmd_wq#1608 ->(wq_completion)nfc2_nci_cmd_wq#1609 ->(wq_completion)nfc2_nci_cmd_wq#1610 ->(wq_completion)nfc2_nci_cmd_wq#1611 ->(wq_completion)nfc3_nci_cmd_wq#790 ->(wq_completion)nfc2_nci_cmd_wq#1612 ->(wq_completion)nfc2_nci_cmd_wq#1613 ->(wq_completion)nfc2_nci_cmd_wq#1614 ->(wq_completion)nfc2_nci_cmd_wq#1615 ->(wq_completion)nfc2_nci_cmd_wq#1616 ->(wq_completion)nfc2_nci_cmd_wq#1617 ->(wq_completion)nfc2_nci_cmd_wq#1618 ->(wq_completion)nfc3_nci_cmd_wq#791 ->(wq_completion)nfc2_nci_cmd_wq#1619 ->(wq_completion)nfc3_nci_cmd_wq#792 ->(wq_completion)nfc2_nci_cmd_wq#1620 ->(wq_completion)nfc3_nci_cmd_wq#793 ->(wq_completion)nfc2_nci_cmd_wq#1621 ->(wq_completion)nfc4_nci_cmd_wq#314 ->(wq_completion)nfc2_nci_cmd_wq#1622 ->(wq_completion)nfc2_nci_cmd_wq#1623 ->(wq_completion)nfc3_nci_cmd_wq#794 ->(wq_completion)nfc2_nci_cmd_wq#1624 ->(wq_completion)nfc2_nci_cmd_wq#1625 ->(wq_completion)nfc3_nci_cmd_wq#795 ->(wq_completion)nfc2_nci_cmd_wq#1626 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#396 FD: 1 BD: 2 ....: (&ndev->cmd_timer) FD: 1 BD: 2 ....: (&ndev->data_timer) FD: 25 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#11 ->&rq->__lock FD: 33 BD: 48 +.-.: (&peer->timer_persistent_keepalive) ->pool_lock#2 ->&list->lock#14 ->tk_core.seq.seqcount ->&c->lock ->&n->list_lock ->&____s->seqcount#2 ->&____s->seqcount ->batched_entropy_u8.lock ->kfence_freelist_lock ->init_task.mems_allowed_seq.seqcount FD: 25 BD: 5 +.+.: (work_completion)(&rfkill->uevent_work) ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#498 
FD: 1 BD: 1 +.+.: &local->sockets.lock FD: 1 BD: 1 +.+.: &local->raw_sockets.lock FD: 1 BD: 1 ....: (&local->link_timer) FD: 25 BD: 1 +.+.: (work_completion)(&local->tx_work) ->&rq->__lock FD: 25 BD: 1 +.+.: (work_completion)(&local->rx_work) ->&rq->__lock FD: 25 BD: 1 +.+.: (work_completion)(&local->timeout_work) ->&rq->__lock FD: 1 BD: 1 ....: (&local->sdreq_timer) FD: 25 BD: 1 +.+.: (work_completion)(&local->sdreq_timeout_work) ->&rq->__lock FD: 1 BD: 56 .+.-: &table->lock#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#27 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#591 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#485 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#196 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#957 FD: 1 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#967 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#8 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#563 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#955 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#966 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#142 FD: 25 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#19 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#203 FD: 1 BD: 54 +.+.: nf_hook_mutex.wait_lock FD: 1 BD: 51 +.+.: (work_completion)(&sta->drv_deliver_wk) FD: 1 BD: 1 +.+.: &xa->xa_lock#20 FD: 81 BD: 2 +.+.: sk_lock-AF_INET/1 ->slock-AF_INET#2 ->rlock-AF_INET ->&list->lock#17 ->&rq->__lock ->rcu_node_0 ->&obj_hash[i].lock ->&base->lock ->pool_lock#2 ->krc.lock ->&____s->seqcount FD: 1 BD: 3 +.-.: rlock-AF_INET FD: 1 BD: 9 ....: &list->lock#17 FD: 1 BD: 54 ++.-: &sctp_ep_hashtable[i].lock FD: 1 BD: 60 +.-.: &nf_nat_locks[i] FD: 1 BD: 2 +.+.: loop_validate_mutex.wait_lock FD: 1 BD: 47 +.+.: (work_completion)(&(&bond->mii_work)->work) FD: 30 BD: 1 ..-.: &(&bat_priv->tt.work)->timer FD: 82 BD: 1 +.-.: (&ndev->rs_timer) ->&ndev->lock ->pool_lock#2 ->&c->lock ->&dir->lock#2 ->&ul->lock#2 ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock ->&zone->lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->init_task.mems_allowed_seq.seqcount FD: 25 BD: 1 .+.+: drm_unplug_srcu ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 17 +.+.: &file->master_lookup_lock FD: 40 BD: 50 +...: _xmit_NETROM ->(console_sem).lock ->&obj_hash[i].lock ->pool_lock#2 ->quarantine_lock FD: 1 BD: 49 +...: _xmit_TUNNEL#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#526 FD: 1 BD: 5 +.+.: (work_completion)(&(&c->work)->work) FD: 62 BD: 3 +.+.: &type->s_umount_key#48 ->&x->wait#23 ->shrinker_mutex ->&obj_hash[i].lock ->pool_lock#2 ->rename_lock.seqcount ->&dentry->d_lock ->&sb->s_type->i_lock_key#19 ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->&dentry->d_lock/1 FD: 1 BD: 55 ..-.: key#23 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#542 ->&rq->__lock FD: 1 BD: 47 +.+.: (work_completion)(&port->wq) FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#786 ->&rq->__lock FD: 1 BD: 49 +...: _xmit_SIT#2 FD: 25 BD: 1 +.+.: (work_completion)(&data->fib_flush_work) ->&rq->__lock FD: 1 BD: 97 ....: key#22 FD: 1 BD: 2 +.+.: nf_sockopt_mutex.wait_lock FD: 30 BD: 6 +.+.: (work_completion)(&(&bat_priv->bla.work)->work) ->key#20 ->&obj_hash[i].lock ->&base->lock ->rcu_node_0 ->&rq->__lock ->crngs.lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#898 FD: 1 BD: 92 +.+.: freezer_mutex.wait_lock FD: 54 BD: 48 
+.-.: (&peer->timer_retransmit_handshake) ->&peer->endpoint_lock FD: 34 BD: 6 +.+.: (work_completion)(&(&bat_priv->tt.work)->work) ->key#16 ->key#21 ->&bat_priv->tt.req_list_lock ->&bat_priv->tt.roam_list_lock ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->&cfs_rq->removed.lock ->pool_lock#2 ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 7 +...: key#21 FD: 1 BD: 7 +...: &bat_priv->tt.req_list_lock FD: 1 BD: 7 +...: &bat_priv->tt.roam_list_lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#341 ->&rq->__lock FD: 1 BD: 5 +...: &tn->node_list_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1466 FD: 32 BD: 48 +.-.: (&peer->timer_send_keepalive) ->pool_lock#2 ->&c->lock ->&list->lock#14 ->tk_core.seq.seqcount ->&____s->seqcount#2 ->&____s->seqcount ->&n->list_lock ->batched_entropy_u8.lock ->kfence_freelist_lock FD: 10 BD: 51 +...: &idev->mc_query_lock ->&obj_hash[i].lock FD: 43 BD: 47 +.+.: __ip_vs_mutex ->&ipvs->dest_trash_lock ->&rq->__lock ->(console_sem).lock ->console_owner_lock ->console_owner ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 ->__ip_vs_mutex.wait_lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 1 +.+.: (work_completion)(&hdev->reenable_adv_work) FD: 30 BD: 1 ..-.: &(&bat_priv->bla.work)->timer FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1180 FD: 1 BD: 7 +...: &hash->list_locks[i] FD: 16 BD: 1 +.-.: (&n->timer) ->&n->lock FD: 1 BD: 115 +.+.: smack_known_lock.wait_lock FD: 26 BD: 53 ....: &sk->sk_lock.wq ->&p->pi_lock FD: 29 BD: 6 +.+.: (work_completion)(&(&bat_priv->dat.work)->work) ->&hash->list_locks[i] ->&obj_hash[i].lock ->&base->lock ->&rq->__lock ->&cfs_rq->removed.lock ->rcu_node_0 ->pool_lock#2 ->&rcu_state.expedited_wq FD: 1 BD: 1 +.+.: ®ion->snapshot_lock FD: 1 BD: 1 +.+.: (work_completion)(&data->suspend_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#134 FD: 1 BD: 59 +.-.: &r->producer_lock#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#10 FD: 1 BD: 163 +.+.: pcpu_alloc_mutex.wait_lock FD: 1 BD: 48 +.+.: (work_completion)(&peer->clear_peer_work) FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1209 ->&rq->__lock FD: 30 BD: 1 ..-.: &(&bat_priv->dat.work)->timer FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#461 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1054 FD: 1 BD: 49 +...: _xmit_IPGRE#2 FD: 1 BD: 5 +.+.: &fn->fou_lock FD: 1 BD: 51 ....: (&ifibss->timer) FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#14 ->&rq->__lock FD: 1 BD: 51 ....: (&local->dynamic_ps_timer) FD: 1 BD: 5 +.+.: nfc_devlist_mutex.wait_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1198 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#923 FD: 1 BD: 5 +...: k-clock-AF_NETLINK FD: 1 BD: 47 +.+.: (work_completion)(&(&priv->connect)->work) FD: 1 BD: 48 ....: (&peer->timer_new_handshake) FD: 1 BD: 48 ....: (&peer->timer_zero_key_material) FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#4 FD: 1 BD: 47 +.+.: isotp_notifier_lock FD: 1 BD: 47 +...: &net->xfrm.xfrm_state_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#368 FD: 1 BD: 47 +.+.: raw_notifier_lock FD: 51 BD: 1 +.-.: (&p->forward_delay_timer) ->&br->lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#413 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#123 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1258 ->&rq->__lock FD: 29 BD: 1 +.-.: (&timer) ->&obj_hash[i].lock ->&base->lock ->&txlock ->&txwq FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#652 FD: 1 BD: 49 +...: _xmit_NONE#2 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#211 ->&rq->__lock FD: 40 BD: 2 +.+.: 
fqdir_free_work ->rcu_state.barrier_mutex ->&obj_hash[i].lock ->pool_lock#2 ->rcu_state.barrier_mutex.wait_lock ->&p->pi_lock ->quarantine_lock FD: 1 BD: 1 +.+.: (work_completion)(&(&team->mcast_rejoin.dw)->work) FD: 1 BD: 5 ....: (&net->fs_probe_timer) FD: 1 BD: 1 ....: (&local->client_conn_reap_timer) FD: 1 BD: 54 +.+.: rcu_state.exp_mutex.wait_lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#585 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1273 FD: 1 BD: 5 +.+.: &hn->hn_lock FD: 28 BD: 47 +.+.: &caifn->caifdevs.lock ->&obj_hash[i].lock ->&x->wait#3 ->&rq->__lock ->pool_lock#2 ->&this->info_list_lock ->&cfs_rq->removed.lock ->pool_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#551 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_tx_wq#550 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#545 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1245 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#188 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#163 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#886 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#159 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1165 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#428 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#896 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#8 FD: 1 BD: 47 +...: nr_neigh_list_lock FD: 1 BD: 47 +...: nr_list_lock FD: 72 BD: 1 +.+.: &type->s_umount_key#47 ->&x->wait#23 ->shrinker_mutex ->&obj_hash[i].lock ->pool_lock#2 ->rename_lock.seqcount ->&dentry->d_lock ->&dentry->d_lock/1 ->&sb->s_type->i_lock_key#32 ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->&fsnotify_mark_srcu ->binderfs_minors_mutex ->&rq->__lock ->quarantine_lock ->rcu_node_0 FD: 1 BD: 1 +.+.: &mq_lock FD: 78 BD: 2 +.+.: free_ipc_work ->&obj_hash[i].lock ->&x->wait#3 ->&rq->__lock ->mount_lock ->&fsnotify_mark_srcu ->&dentry->d_lock ->&type->s_umount_key#48 ->sb_lock ->unnamed_dev_ida.xa_lock ->list_lrus_mutex ->&xa->xa_lock#5 ->pool_lock#2 ->mnt_id_ida.xa_lock ->&ids->rwsem ->(work_completion)(&ht->run_work) ->&ht->mutex ->percpu_counters_lock ->pcpu_lock ->sysctl_lock ->&sb->s_type->i_lock_key#23 ->rename_lock.seqcount ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->proc_inum_ida.xa_lock FD: 1 BD: 5 +.+.: (work_completion)(&net->xfrm.policy_hash_work) FD: 1 BD: 51 ....: (&dwork->timer)#4 FD: 1 BD: 3 +.+.: &ids->rwsem FD: 1 BD: 5 +.+.: (work_completion)(&local->restart_work) FD: 1 BD: 5 +.+.: (work_completion)(&rdev->conn_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#13 FD: 1 BD: 12 +.+.: (work_completion)(&(&hdev->interleave_scan)->work) FD: 25 BD: 23 +.+.: (work_completion)(&(&conn->id_addr_timer)->work) ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#991 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#972 FD: 53 BD: 22 +.+.: (work_completion)(&(&conn->disc_work)->work) ->&hdev->unregister_lock FD: 1 BD: 22 +.+.: (work_completion)(&(&conn->auto_accept_work)->work) FD: 1 BD: 22 +.+.: (work_completion)(&(&conn->idle_work)->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#114 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#108 FD: 1 BD: 48 +...: &ipvs->dest_trash_lock FD: 1 BD: 47 +.+.: flowtable_lock FD: 32 BD: 7 +.+.: nf_conntrack_mutex ->&nf_conntrack_locks[i] ->&rq->__lock ->&____s->seqcount#7 ->&obj_hash[i].lock ->pool_lock#2 
->&nf_conntrack_locks[i]/1 ->rcu_node_0 ->&rcu_state.expedited_wq ->&cfs_rq->removed.lock FD: 1 BD: 49 ....: (&p->rexmit_timer) FD: 1 BD: 47 +...: &bat_priv->forw_bcast_list_lock FD: 1 BD: 6 +.+.: ebt_mutex.wait_lock FD: 25 BD: 51 +.+.: (work_completion)(&(&idev->mc_report_work)->work) ->&rq->__lock FD: 25 BD: 51 +.+.: &net->xdp.lock ->&rq->__lock FD: 1 BD: 51 +.+.: mirred_list_lock FD: 1 BD: 51 +...: &idev->mc_report_lock FD: 25 BD: 51 +.+.: &pnn->pndevs.lock ->&rq->__lock FD: 25 BD: 51 +.+.: &pnn->routes.lock ->&rq->__lock FD: 43 BD: 48 +.+.: (work_completion)(&br->mcast_gc_work) ->&br->multicast_lock ->(&p->rexmit_timer) ->&obj_hash[i].lock ->&base->lock ->(&p->timer) ->pool_lock#2 ->krc.lock ->(&mp->timer) FD: 1 BD: 49 +...: &qdisc_xmit_lock_key#2 FD: 1 BD: 49 +...: &qdisc_xmit_lock_key FD: 1 BD: 5 ....: netdev_unregistering_wq.lock FD: 862 BD: 1 +.+.: (wq_completion)netns ->net_cleanup_work FD: 861 BD: 2 +.+.: net_cleanup_work ->pernet_ops_rwsem ->rcu_state.barrier_mutex ->&obj_hash[i].lock ->pool_lock#2 ->krc.lock ->&dir->lock ->rcu_state.barrier_mutex.wait_lock ->&p->pi_lock ->&rq->__lock FD: 1 BD: 5 +...: &net->nsid_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#989 FD: 25 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#4 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#937 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#947 FD: 1 BD: 48 +...: &net->xfrm.xfrm_policy_lock FD: 1 BD: 47 +.+.: bcm_notifier_lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#342 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#930 ->&rq->__lock FD: 4 BD: 1 +.+.: put_task_map-wait-type-override#2 ->&obj_hash[i].lock ->pool_lock#2 FD: 5 BD: 48 +...: _xmit_SLIP#2 ->&eql->queue.lock FD: 1 BD: 48 +...: &msk->pm.lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1106 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#16 FD: 1 BD: 47 +...: &bond->ipsec_lock FD: 25 BD: 1 +.+.: (work_completion)(&(&team->notify_peers.dw)->work) ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1115 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#609 ->&rq->__lock FD: 1 BD: 47 +...: _xmit_NETROM#2 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#952 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc36_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1599 FD: 1 BD: 5 +.+.: netns_bpf_mutex FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1563 ->&rq->__lock FD: 1 BD: 7 ++++: &net->cells_lock FD: 1 BD: 5 ....: (&net->cells_timer) FD: 33 BD: 1 +.+.: (wq_completion)afs ->(work_completion)(&net->cells_manager) ->(work_completion)(&net->fs_manager) FD: 30 BD: 2 +.+.: (work_completion)(&net->cells_manager) ->&net->cells_lock ->bit_wait_table + i ->&rq->__lock ->rcu_node_0 ->&rcu_state.expedited_wq FD: 1 BD: 5 ....: (&net->fs_timer) FD: 28 BD: 2 +.+.: (work_completion)(&net->fs_manager) ->&(&net->fs_lock)->lock ->bit_wait_table + i ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 3 +.+.: &(&net->fs_lock)->lock FD: 1 BD: 6 +.+.: &rx->incoming_lock FD: 1 BD: 6 +.+.: &call->notify_lock FD: 1 BD: 6 ....: (rxrpc_call_limiter).lock FD: 1 BD: 6 +.+.: &rx->recvmsg_lock FD: 1 BD: 6 ....: (&call->timer) FD: 1 BD: 6 ....: &list->lock#18 FD: 1 BD: 5 +.+.: (wq_completion)kafsd FD: 1 BD: 5 +...: k-clock-AF_RXRPC FD: 1 BD: 5 ..-.: rlock-AF_RXRPC FD: 1 BD: 1 ....: &list->lock#19 FD: 1 BD: 8 +.+.: (work_completion)(&data->gc_work) FD: 1 BD: 5 +.+.: (work_completion)(&ovs_net->dp_notify_work) FD: 1 
BD: 5 +...: &srv->idr_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#211 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#751 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#595 FD: 60 BD: 1 +.+.: (wq_completion)inet_frag_wq ->(work_completion)(&fqdir->destroy_work) FD: 4 BD: 5 +.+.: &bat_priv->bat_v.ogm_buff_mutex ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 7 +...: &nt->cluster_scope_lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1321 ->&rq->__lock FD: 1 BD: 5 +.+.: (work_completion)(&tn->work) FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1321 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#603 ->&rq->__lock FD: 1 BD: 5 +.+.: (work_completion)(&(&bat_priv->bat_v.ogm_wq)->work) FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#576 ->&rq->__lock FD: 1 BD: 5 +.+.: (wq_completion)krdsd FD: 1 BD: 5 +.+.: (work_completion)(&rtn->rds_tcp_accept_w) FD: 1 BD: 51 +...: &icsk->icsk_accept_queue.rskq_lock#2 FD: 1 BD: 5 ....: rds_tcp_conn_lock FD: 1 BD: 5 ....: loop_conns_lock FD: 1 BD: 5 +.+.: (wq_completion)l2tp FD: 1 BD: 5 ....: (&rxnet->service_conn_reap_timer) FD: 2 BD: 6 +.+.: (work_completion)(&rxnet->service_conn_reaper) ->&rxnet->conn_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#582 FD: 1 BD: 5 +...: &bat_priv->gw.list_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#29 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#157 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1595 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1157 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1126 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#143 FD: 1 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#21 FD: 1 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#6 FD: 1 BD: 47 +...: &bpq_netdev_addr_lock_key FD: 1 BD: 5 +.+.: ipvs->sync_mutex FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1064 FD: 1 BD: 5 +.+.: (work_completion)(&(&cnet->ecache.dwork)->work) FD: 1 BD: 47 +.+.: (work_completion)(&wdev->disconnect_wk) FD: 1 BD: 47 +.+.: (work_completion)(&wdev->pmsr_free_wk) FD: 702 BD: 6 +.+.: (work_completion)(&(&rdev->dfs_update_channels_wk)->work) ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock FD: 1 BD: 51 ....: (&dwork->timer)#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1004 FD: 1 BD: 47 ....: &rdev->dev_wait FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#404 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#125 FD: 1 BD: 5 +.+.: (work_completion)(&(&rdev->background_cac_done_wk)->work) FD: 1 BD: 5 +.+.: (work_completion)(&rdev->destroy_work) FD: 1 BD: 5 +.+.: (work_completion)(&rdev->propagate_radar_detect_wk) FD: 1 BD: 5 +.+.: (work_completion)(&rdev->propagate_cac_done_wk) FD: 1 BD: 5 +.+.: (work_completion)(&rdev->mgmt_registrations_update_wk) FD: 1 BD: 5 +.+.: (work_completion)(&rdev->background_cac_abort_wk) FD: 1 BD: 5 ....: (&local->sta_cleanup) FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#598 ->&rq->__lock FD: 33 BD: 2 +.+.: (work_completion)(&(&devlink->rwork)->work) ->&obj_hash[i].lock ->&x->wait#3 ->&rq->__lock ->pool_lock#2 ->&cfs_rq->removed.lock ->quarantine_lock ->rcu_node_0 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1545 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#393 FD: 1 BD: 3 +.+.: capidev_list_lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#977 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#651 ->&rq->__lock FD: 25 BD: 47 +.+.: (work_completion)(&(&priv->scan_result)->work) ->&rq->__lock 
FD: 39 BD: 49 +.-.: (&p->timer) ->&br->multicast_lock FD: 39 BD: 49 +.-.: (&mp->timer) ->&br->multicast_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1073 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#465 FD: 1 BD: 1 +.+.: (wq_completion)nfc38_nci_tx_wq#2 FD: 1 BD: 47 ....: (&pmctx->ip6_mc_router_timer) FD: 1 BD: 47 ....: (&pmctx->ip4_mc_router_timer) FD: 1 BD: 49 +...: &vlan_netdev_xmit_lock_key FD: 1 BD: 49 +...: &batadv_netdev_xmit_lock_key FD: 1 BD: 49 +...: &qdisc_xmit_lock_key#3 FD: 1 BD: 49 +...: &qdisc_xmit_lock_key#4 FD: 1 BD: 49 +...: _xmit_LOOPBACK#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#981 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#6 FD: 1 BD: 89 ...-: init_task.mems_allowed_seq.seqcount FD: 1 BD: 47 +.+.: (work_completion)(&(&bond->arp_work)->work) FD: 1 BD: 47 +.+.: (work_completion)(&(&bond->alb_work)->work) FD: 1 BD: 47 +.+.: (work_completion)(&(&bond->ad_work)->work) FD: 1 BD: 47 +.+.: (work_completion)(&(&bond->mcast_work)->work) FD: 1 BD: 47 +.+.: (work_completion)(&(&bond->slave_arr_work)->work) FD: 1 BD: 47 ....: (&br->hello_timer) FD: 1 BD: 47 ....: (&br->topology_change_timer) FD: 1 BD: 47 ....: (&br->tcn_timer) FD: 1 BD: 47 ....: (&brmctx->ip4_mc_router_timer) FD: 1 BD: 47 ....: (&brmctx->ip4_other_query.timer) FD: 1 BD: 47 ....: (&brmctx->ip4_other_query.delay_timer) FD: 1 BD: 47 ....: (&brmctx->ip6_mc_router_timer) FD: 1 BD: 47 ....: (&brmctx->ip6_other_query.timer) FD: 1 BD: 47 ....: (&brmctx->ip6_other_query.delay_timer) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#908 FD: 5 BD: 1 +...: &nr_netdev_xmit_lock_key ->nr_node_list_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 2 +...: nr_node_list_lock FD: 5 BD: 1 +...: _xmit_X25#2 ->&lapbeth->up_lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#405 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#13 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1372 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#346 FD: 1 BD: 48 +...: &this->info_list_lock FD: 1 BD: 5 +.+.: &pnetids_ndev->lock FD: 32 BD: 5 +.+.: k-sk_lock-AF_INET6/1 ->k-slock-AF_INET6 ->rlock-AF_INET6 ->&list->lock#17 FD: 1 BD: 8 ....: rlock-AF_INET6 FD: 6 BD: 51 +...: k-slock-AF_INET6/1 ->&sctp_ep_hashtable[i].lock ->&obj_hash[i].lock ->pool_lock#2 ->k-clock-AF_INET6 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#574 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1267 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#555 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#184 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1239 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1232 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1232 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#174 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1226 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1400 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1261 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#559 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1400 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1124 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#530 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#171 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1223 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#115 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1383 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#941 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#525 ->&rq->__lock FD: 25 
BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1215 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#168 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#520 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#514 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1596 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1190 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1182 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#18 FD: 25 BD: 1 +.+.: usbfs_mutex ->&rq->__lock FD: 1 BD: 4 +.+.: oom_adj_mutex.wait_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#479 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#480 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#149 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1160 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1148 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1150 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1154 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1136 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1125 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#452 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#461 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1100 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#132 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#152 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1079 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#16 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1077 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#129 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#429 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#5 FD: 77 BD: 1 +.-.: (&sk->sk_timer) ->slock-AF_INET#2 FD: 1 BD: 3 +...: slock-AF_PHONET FD: 25 BD: 1 +.+.: (wq_completion)nfc37_nci_rx_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc38_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1056 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#126 FD: 1 BD: 5 +.+.: &sn->gssp_lock FD: 1 BD: 8 +.+.: &cd->hash_lock FD: 1 BD: 5 +.+.: xfrm_state_gc_work FD: 1 BD: 5 +...: ip6_fl_lock FD: 1 BD: 5 ....: (&net->ipv6.ip6_fib_timer) FD: 1 BD: 47 ....: (&mrt->ipmr_expire_timer) FD: 1 BD: 5 ....: (&ipvs->dest_trash_timer) FD: 1 BD: 5 +.+.: (work_completion)(&(&ipvs->expire_nodest_conn_work)->work) FD: 1 BD: 5 +.+.: (work_completion)(&(&ipvs->est_reload_work)->work) FD: 1 BD: 5 +...: recent_lock FD: 1 BD: 5 +.+.: hashlimit_mutex FD: 1 BD: 5 +.+.: trans_gc_work FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1499 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#432 ->&rq->__lock FD: 1 BD: 5 +.+.: (work_completion)(&net->xfrm.state_hash_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#415 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#423 FD: 1 BD: 51 +.+.: (wq_completion)phy162 FD: 1 BD: 50 +.+.: rcu_state.barrier_mutex.wait_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#407 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#406 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1035 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#128 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#927 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1012 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#403 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#401 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#122 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1002 FD: 1 BD: 
1 +.+.: (wq_completion)nfc2_nci_rx_wq#1000 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#395 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1118 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#995 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#992 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#992 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#401 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#984 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#984 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#387 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#985 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#974 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#976 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#374 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#373 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#110 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#971 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#109 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#376 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#372 FD: 1 BD: 92 +.+.: gdp_mutex.wait_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#955 FD: 1 BD: 1 +.+.: (wq_completion)nfc39_nci_rx_wq#2 FD: 25 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#6 ->&rq->__lock FD: 1 BD: 2 +...: l2tp_ip6_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc40_nci_rx_wq#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#4 FD: 33 BD: 2 +.+.: (work_completion)(&pool->idle_cull_work) ->wq_pool_attach_mutex ->wq_pool_attach_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1514 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#98 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#355 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#933 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1444 ->&rq->__lock FD: 1 BD: 93 +.+.: wq_pool_mutex.wait_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1419 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1591 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#892 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1071 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#442 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1297 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#387 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#524 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#566 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1591 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1583 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1424 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1030 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#940 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#922 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc3_nci_cmd_wq#404 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1576 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1588 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#605 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#22 FD: 135 BD: 48 +.+.: team->team_lock_key#76 ->&rq->__lock ->fs_reclaim ->netpoll_srcu ->net_rwsem ->&tn->lock ->_xmit_ETHER ->&dir->lock#2 ->input_pool.lock ->&c->lock ->&ndev->lock ->&obj_hash[i].lock ->nl_table_lock ->nl_table_wait.lock ->&in_dev->mc_tomb_lock ->&im->lock ->cbs_list_lock ->sysfs_symlink_target_lock ->lock ->&root->kernfs_rwsem ->&n->list_lock ->lweventlist_lock ->(console_sem).lock ->&____s->seqcount#2 ->&____s->seqcount FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1329 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1330 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#597 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1329 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#323 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1572 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1314 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#24 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#756 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#707 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1532 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#47 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#584 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#593 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#198 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#25 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1302 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#586 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#572 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1291 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1291 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1285 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#577 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1279 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#573 ->&rq->__lock FD: 30 BD: 1 ..-.: net/wireless/reg.c:533 FD: 702 BD: 2 +.+.: (crda_timeout).work ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#570 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1272 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1272 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1268 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1266 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#555 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1266 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1264 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1261 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1257 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1255 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#547 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#136 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_rx_wq#4 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#546 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#544 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1245 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1242 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_tx_wq#1241 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#176 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1236 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#534 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1228 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#172 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#537 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1220 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#169 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#522 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1214 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1209 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#516 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#370 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#162 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1604 ->&rq->__lock FD: 164 BD: 6 +.+.: &tty->ldisc_sem/1 ->&tty->termios_rwsem ->tty_ldiscs_lock ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#507 FD: 1 BD: 1 +.+.: (work_completion)(&tty->SAK_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#28 FD: 1 BD: 51 +.+.: (wq_completion)phy161 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#502 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1194 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1379 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_rx_wq#498 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#154 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1187 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1184 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#490 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#491 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#307 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#84 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#153 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#156 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1039 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1169 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#166 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1218 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1165 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#469 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#478 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#467 FD: 1 BD: 90 +.+.: dev_pm_qos_sysfs_mtx.wait_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#466 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1134 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1124 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#458 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1113 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#23 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#445 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#442 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#441 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#450 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1085 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1078 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1076 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1074 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1072 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#11 FD: 25 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#5 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#7 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc33_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#428 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#130 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#425 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#243 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#508 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1050 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1048 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#416 FD: 1 BD: 2 +.+.: (wq_completion)nfc37_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#412 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#408 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1036 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1623 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1014 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1013 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#400 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#402 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1007 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#126 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#397 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#999 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#398 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#961 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1283 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#994 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1105 FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#962 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#965 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#20 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#107 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#374 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#961 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#371 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#373 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_tx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc37_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc37_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#6 FD: 25 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#9 ->&rq->__lock FD: 26 BD: 12 ....: &root->deactivate_waitq ->&p->pi_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1177 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#24 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#26 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#348 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#108 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1175 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1508 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#151 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#956 FD: 25 BD: 2 +.+.: 
(wq_completion)nfc3_nci_cmd_wq#777 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#18 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#214 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#4 FD: 1 BD: 56 ....: key#24 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1090 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#105 FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg2#128 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 25 BD: 1 +.+.: (wq_completion)nfc40_nci_tx_wq#2 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#5 FD: 25 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#4 FD: 25 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#8 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1126 ->&rq->__lock FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg0#132 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#370 FD: 25 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#5 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#3 ->&rq->__lock FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg2#147 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 25 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#3 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#951 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#373 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#954 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#6 FD: 25 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#7 FD: 1 BD: 1 ....: &list->lock#21 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#948 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#945 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#947 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#943 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#104 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#939 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#103 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1510 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#625 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#735 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#9 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1096 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#922 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#925 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#347 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#918 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#343 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#904 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1062 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#333 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#978 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#893 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_tx_wq#92 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#894 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#371 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#158 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#909 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#196 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#716 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1459 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1383 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1379 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#509 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#208 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1388 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1370 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1365 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1460 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#728 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1598 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#333 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1142 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#880 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1307 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#92 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#892 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#5 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#966 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#573 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#407 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#991 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1020 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#422 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1061 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1265 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1029 ->&rq->__lock FD: 36 BD: 1 +.+.: (wq_completion)bond0#76 ->(work_completion)(&(&slave->notify_work)->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1032 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1064 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#557 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1070 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1176 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#464 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1134 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1057 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#538 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1165 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1105 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#451 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1126 
FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1143 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1148 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1509 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1065 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1199 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#511 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#170 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1222 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#34 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1244 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1255 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1170 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1280 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1384 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1373 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1386 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#621 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#25 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#635 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1346 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1112 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1177 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#376 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1586 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1584 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1593 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#770 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#503 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#487 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1028 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1084 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1230 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1470 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1199 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1222 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1226 ->&rq->__lock FD: 36 BD: 1 +.+.: (wq_completion)bond0#82 ->(work_completion)(&(&slave->notify_work)->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1312 FD: 25 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#12 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#259 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#676 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#268 FD: 2 BD: 2 +.+.: &rdev->filelist_sem ->&rdev->filelist_lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#963 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1060 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#3 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#322 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#4 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1620 ->&rq->__lock FD: 707 BD: 13 +.+.: &devlink->lock_key#72 ->crngs.lock ->&rq->__lock ->fs_reclaim ->devlinks.xa_lock ->&xa->xa_lock#19 ->pcpu_alloc_mutex ->&c->lock ->&n->list_lock ->&obj_hash[i].lock ->&base->lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->batched_entropy_u32.lock ->rtnl_mutex ->rcu_node_0 ->&(&fn_net->fib_chain)->lock ->stack_depot_init_mutex ->&devlink_port->type_lock ->&nsim_trap_data->trap_lock ->&rcu_state.expedited_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1025 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1015 ->&rq->__lock FD: 25 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#1603 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#787 ->&rq->__lock FD: 1 BD: 1 +.+.: &type->s_umount_key#49 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#889 FD: 25 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#11 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1573 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1569 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1570 FD: 1 BD: 5 +.+.: (wq_completion)tipc_crypto#10 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#592 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#209 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1558 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#771 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1556 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1554 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1536 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1553 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#23 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#325 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#601 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#210 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#207 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#209 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#599 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1327 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#205 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#599 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1323 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1323 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1323 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#206 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1320 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1319 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1317 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1317 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1282 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1316 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1315 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1315 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#589 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#24 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1313 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1299 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#590 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#587 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#695 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#328 FD: 25 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#24 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#16 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1027 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1411 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1392 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#622 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#157 FD: 17 BD: 53 +...: &dccp_hashinfo.bhash[i].lock ->&dccp_hashinfo.bhash2[i].lock ->stock_lock ->&obj_hash[i].lock ->pool_lock#2 ->clock-AF_INET ->per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) ->&c->lock ->&meta->lock ->kfence_freelist_lock FD: 15 BD: 54 +...: &dccp_hashinfo.bhash2[i].lock ->stock_lock ->&____s->seqcount#2 ->&____s->seqcount ->pool_lock#2 ->&c->lock ->clock-AF_INET 
->&obj_hash[i].lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&n->list_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#495 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1167 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1388 FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg1#70 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#880 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#399 FD: 1 BD: 48 +...: l2tp_ip_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#769 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1021 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1311 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#589 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#200 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1310 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#595 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1309 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#199 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#201 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#594 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1306 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#202 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#720 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1439 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1305 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1208 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#591 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#196 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#198 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1301 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#590 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#41 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1300 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1298 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#580 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1296 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1295 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#198 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#577 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#192 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1293 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#573 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#582 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#574 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1289 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1289 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#578 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1288 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1286 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1287 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#568 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1285 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1284 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1283 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1282 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#569 FD: 25 BD: 
2 +.+.: (wq_completion)nfc3_nci_cmd_wq#575 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#564 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1278 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#563 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1276 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1277 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#564 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1275 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1274 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1271 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#44 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1270 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#561 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#559 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1139 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#418 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1266 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#556 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1265 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1265 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#554 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#557 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1240 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#553 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1263 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#187 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1261 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#553 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1259 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1260 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1258 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1257 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#551 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1257 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#548 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#549 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1252 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#924 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1251 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#555 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#185 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1250 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#548 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#38 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1249 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1248 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#553 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#546 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#183 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#41 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#37 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1242 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#542 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#178 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#538 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#177 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1238 FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#21 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#36 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1234 FD: 
25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1234 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#175 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#536 FD: 1 BD: 1 +.+.: (work_completion)(&td->dispatch_work) FD: 2 BD: 1 +.+.: &bdi->cgwb_release_mutex ->cgwb_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#173 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#35 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#541 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1225 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1225 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1226 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#176 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#538 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#531 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#32 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#528 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#169 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_rx_wq#31 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#529 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#528 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#527 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#525 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1219 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1217 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1215 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1215 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1214 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#528 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#522 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1212 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1213 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1212 FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg0#163 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#521 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#520 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#30 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#166 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#164 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1207 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1207 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1204 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1206 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#514 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#5 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#515 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1200 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#28 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#160 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#509 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#917 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1116 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#29 ->&rq->__lock FD: 1 BD: 1 ....: &port->buf.lock/1 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#158 FD: 1 BD: 4 +.+.: tty_mutex.wait_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#156 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1197 FD: 36 BD: 1 +.+.: (wq_completion)bond0#67 ->(work_completion)(&(&slave->notify_work)->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#500 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#155 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1194 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#160 ->&rq->__lock 
FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1194 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1191 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1193 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1192 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1193 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1187 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#504 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#882 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1185 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#496 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#493 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1184 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1183 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1183 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#155 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1183 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#500 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1181 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1180 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1180 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1181 FD: 25 BD: 5 +.+.: (wq_completion)tipc_rcv#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1179 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#18 FD: 25 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#16 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#86 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#25 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#488 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#157 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#17 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1175 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1173 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#485 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1172 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1593 FD: 36 BD: 1 +.+.: (wq_completion)bond0#72 ->(work_completion)(&(&slave->notify_work)->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1170 FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg1#129 ->&rq->__lock ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#481 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1169 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1167 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#150 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1164 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1164 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#481 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#480 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#478 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1161 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1156 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1157 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#483 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#477 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#482 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1154 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1151 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1152 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#151 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#470 FD: 1 BD: 1 
+.+.: (wq_completion)nfc3_nci_rx_wq#471 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1146 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1144 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1142 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#469 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#146 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1138 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#468 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#470 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1133 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1131 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#147 ->&rq->__lock ->&cfs_rq->removed.lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1131 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#463 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#468 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1127 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1121 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#465 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1121 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#455 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1117 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#462 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1116 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#451 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1109 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1109 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#448 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#142 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#23 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#447 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1100 FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg1#80 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg1#160 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg0#162 ->&rq->__lock ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1099 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#137 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1098 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1097 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#136 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1091 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#140 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1094 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1093 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#439 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#138 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc3_nci_tx_wq#434 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#435 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#131 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#432 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#433 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#130 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#132 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1075 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#135 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1072 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1073 FD: 707 BD: 13 +.+.: &devlink->lock_key#82 ->crngs.lock ->fs_reclaim ->devlinks.xa_lock ->&c->lock ->&xa->xa_lock#19 ->&n->list_lock ->pcpu_alloc_mutex ->&obj_hash[i].lock ->&base->lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->batched_entropy_u32.lock ->rtnl_mutex ->rcu_node_0 ->&rq->__lock ->&(&fn_net->fib_chain)->lock ->rtnl_mutex.wait_lock ->&p->pi_lock ->stack_depot_init_mutex ->&devlink_port->type_lock ->&nsim_trap_data->trap_lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#134 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#131 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#435 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1068 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#22 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#16 FD: 25 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#5 FD: 28 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#5 ->rcu_node_0 ->&rcu_state.expedited_wq ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#5 FD: 25 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc37_nci_tx_wq#5 FD: 25 BD: 4 +.+.: &pnsocks.lock ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc39_nci_rx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc39_nci_tx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc38_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#14 FD: 25 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#8 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1626 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#426 FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#128 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#6 FD: 25 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#6 FD: 25 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#6 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#9 FD: 1 
BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#8 FD: 25 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#7 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#427 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#20 FD: 85 BD: 48 +.+.: (wq_completion)wg-crypt-wg0#79 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1065 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1065 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#422 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#420 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1058 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1060 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1060 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1055 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#426 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#417 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1053 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1053 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1050 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#130 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1048 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1046 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1045 FD: 25 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#12 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1043 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#411 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#118 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#420 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#413 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#410 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1040 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1038 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#416 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1037 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1037 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1035 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1471 ->&rq->__lock FD: 160 BD: 12 +.+.: (wq_completion)hci0#10 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#413 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#407 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1172 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1018 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#405 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1017 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1009 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1010 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1139 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1008 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_tx_wq#1009 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#399 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#335 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1001 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#998 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1406 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#175 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1406 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#336 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#995 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#995 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#18 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#504 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1084 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#120 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1309 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1088 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1104 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1186 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#395 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#399 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#389 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#122 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#397 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#989 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#390 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#986 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#983 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#393 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#116 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#980 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#979 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#117 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#384 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#975 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1495 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#975 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#17 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#378 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#377 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#971 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#111 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#376 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#110 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#375 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#383 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#967 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#9 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#381 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#968 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#965 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#964 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#966 ->&rq->__lock FD: 1 BD: 3 +...: slock-AF_QIPCRTR FD: 27 BD: 2 +.+.: sk_lock-AF_QIPCRTR ->slock-AF_QIPCRTR ->clock-AF_QIPCRTR ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#960 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1273 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#960 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#929 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#959 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#106 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc4_nci_rx_wq#106 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#11 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1621 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#473 FD: 1 BD: 5 +.+.: (wq_completion)tipc_rcv#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#4 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#616 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#713 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1516 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#719 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1192 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#293 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1011 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#368 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#375 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#104 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1101 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#39 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#955 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#374 FD: 1 BD: 1 +.+.: (wq_completion)nfc37_nci_rx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#369 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc37_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#952 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#365 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#952 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#366 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#103 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#5 FD: 25 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#6 FD: 30 BD: 1 ..-.: &(&conn->disc_work)->timer FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#102 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#9 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#364 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#953 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#363 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#101 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#950 FD: 1 BD: 1 +.+.: &node->qrtr_tx_lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1379 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc5_nci_rx_wq#53 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#359 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#358 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#357 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#365 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#364 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#356 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#101 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#358 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#939 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#941 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#100 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#938 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#99 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#353 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#352 FD: 1 BD: 23 +.+.: hci_cb_list_lock.wait_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#101 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1494 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#96 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#350 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#932 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1597 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#935 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#962 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#93 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc42_nci_cmd_wq#3 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1536 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#485 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#110 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#7 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#931 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#684 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1402 FD: 1 BD: 1 ....: &x->wait#27 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#928 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#347 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#926 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#921 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#919 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#344 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#343 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#345 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#913 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#913 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#349 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#340 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#909 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#907 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#905 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#339 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#337 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1251 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1299 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#893 FD: 135 BD: 48 +.+.: team->team_lock_key#83 ->fs_reclaim ->netpoll_srcu ->net_rwsem ->&tn->lock ->_xmit_ETHER ->&dir->lock#2 ->input_pool.lock ->&ndev->lock ->&obj_hash[i].lock ->nl_table_lock ->nl_table_wait.lock ->&in_dev->mc_tomb_lock ->&im->lock ->cbs_list_lock ->sysfs_symlink_target_lock ->lock ->&root->kernfs_rwsem ->&c->lock ->&n->list_lock ->&rq->__lock ->(console_sem).lock 
->lweventlist_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#335 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#897 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#332 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#888 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#885 FD: 85 BD: 48 +.+.: (wq_completion)wg-crypt-wg0#80 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 84 BD: 48 +.+.: (wq_completion)wg-kex-wg2#156 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#332 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#968 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#18 FD: 25 BD: 1 +.+.: (wq_completion)nfc42_nci_tx_wq#3 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#146 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#366 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#352 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#897 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#899 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#887 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1605 ->rcu_node_0 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1578 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1594 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1592 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#32 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1394 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1387 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1383 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#639 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1356 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1350 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#614 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#608 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#28 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1316 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#207 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#611 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#10 FD: 84 BD: 48 +.+.: (wq_completion)wg-kex-wg0#158 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 1 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#12 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#635 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#631 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1387 FD: 25 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#16 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#20 FD: 25 BD: 2 
+.+.: (wq_completion)nfc2_nci_cmd_wq#1423 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#588 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#70 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#266 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#25 FD: 25 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#723 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1489 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#744 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1598 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#309 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#329 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1602 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#91 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#789 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#328 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#886 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#883 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#882 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#900 FD: 25 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#12 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#910 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#350 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1381 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1294 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#945 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#943 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#369 FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg2#146 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1287 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#958 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#571 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#189 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1306 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#987 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1566 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1502 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1517 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#403 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#123 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#993 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#18 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1010 ->&rq->__lock FD: 707 BD: 13 +.+.: &devlink->lock_key#76 ->crngs.lock ->fs_reclaim ->devlinks.xa_lock ->&c->lock ->&xa->xa_lock#19 ->pcpu_alloc_mutex ->&obj_hash[i].lock ->&base->lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->&n->list_lock ->batched_entropy_u32.lock ->rtnl_mutex ->rcu_node_0 ->&rq->__lock ->&(&fn_net->fib_chain)->lock ->stack_depot_init_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->&devlink_port->type_lock ->&nsim_trap_data->trap_lock ->&rcu_state.expedited_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1269 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1017 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1020 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1026 
->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1023 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1024 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 36 BD: 5 +.+.: (wq_completion)bond0#81 ->(work_completion)(&(&slave->notify_work)->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1444 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1256 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#422 ->&rq->__lock FD: 160 BD: 1 +.+.: (wq_completion)hci3#6 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) ->(work_completion)(&(&conn->disc_work)->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1039 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1032 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1029 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1031 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1028 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1026 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1026 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1027 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#565 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1030 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1269 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1030 FD: 1 BD: 1 ....: &f->f_owner.lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1036 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#419 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_tx_wq#412 ->&rq->__lock FD: 172 BD: 1 +.+.: (wq_completion)hci3#5 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#562 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1262 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1500 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#7 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1540 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#436 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1079 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#136 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1228 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#26 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#496 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1168 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1118 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#379 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1054 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1052 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#148 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#17 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1193 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1034 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1097 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1091 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1095 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1103 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#463 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#462 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1133 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#149 FD: 
1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#446 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1166 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1032 FD: 1 BD: 3 ....: &rdev->filelist_lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#926 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1450 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#510 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#197 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1189 FD: 1 BD: 2 ....: &substream->lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#249 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#518 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1202 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1204 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1461 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1096 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#449 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#634 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1203 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1213 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1217 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#531 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1228 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1225 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#205 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#37 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#543 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#548 ->&rq->__lock FD: 160 BD: 12 +.+.: (wq_completion)hci5#4 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#192 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#568 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1275 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#970 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#572 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#566 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1302 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#599 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#606 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1331 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#208 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1332 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1332 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1331 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#231 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#650 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#645 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1339 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1336 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#210 FD: 84 BD: 48 +.+.: (wq_completion)wg-kex-wg2#153 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 84 BD: 48 +.+.: (wq_completion)wg-kex-wg0#156 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 52 BD: 48 +.+.: (wq_completion)wg-kex-wg0#155 ->(work_completion)(&peer->transmit_handshake_work) FD: 85 BD: 48 +.+.: (wq_completion)wg-crypt-wg0#78 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 85 BD: 48 +.+.: (wq_completion)wg-crypt-wg1#77 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#7 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1356 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#649 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1353 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1367 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#238 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#902 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#703 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1386 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#517 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#161 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#330 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1003 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#3 FD: 25 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#3 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1007 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1010 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1611 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1587 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1587 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1574 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1573 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1573 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#784 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#779 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1529 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc42_nci_cmd_wq#4 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#17 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1507 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#677 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#252 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#180 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1328 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#155 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_rx_wq#575 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1137 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1127 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1163 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#747 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#1107 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1087 FD: 29 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#12 ->&rq->__lock ->stock_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg1#81 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1124 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1053 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#386 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1160 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#503 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#502 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#513 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#547 ->&rq->__lock FD: 1 BD: 51 +.+.: (wq_completion)phy163 FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#424 FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg2#74 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg0#75 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#949 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1487 FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg2#139 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#919 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#9 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1300 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1409 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#12 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#613 ->&rq->__lock FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg0#71 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1362 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#648 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1386 ->&rq->__lock FD: 1 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#1385 FD: 25 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#10 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1408 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1465 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#443 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1432 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1433 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1491 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1490 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#37 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#883 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#917 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#901 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#903 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#956 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#959 FD: 1 BD: 47 +...: _xmit_PHONET_PIPE FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#477 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1147 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#285 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#409 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1467 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1625 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#793 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1022 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#450 FD: 134 BD: 48 +.+.: team->team_lock_key#72 ->fs_reclaim ->netpoll_srcu ->net_rwsem ->&tn->lock ->_xmit_ETHER ->&dir->lock#2 ->input_pool.lock ->&ndev->lock ->&obj_hash[i].lock ->nl_table_lock ->nl_table_wait.lock ->&in_dev->mc_tomb_lock ->&im->lock ->cbs_list_lock ->sysfs_symlink_target_lock ->&c->lock ->&n->list_lock ->lock ->&root->kernfs_rwsem ->quarantine_lock ->remove_cache_srcu ->lweventlist_lock ->(console_sem).lock ->&rq->__lock ->&cfs_rq->removed.lock ->pool_lock#2 ->&____s->seqcount#2 ->&____s->seqcount FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1001 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#119 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#998 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1006 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1155 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1014 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1013 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1616 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1409 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1520 FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_tx_wq#2 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1611 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#313 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1610 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1370 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#581 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1593 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1590 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1582 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1585 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#162 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#648 FD: 25 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#1581 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1369 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1581 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1575 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1572 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1575 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1572 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#606 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1333 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1113 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#775 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1333 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1333 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1567 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1565 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#781 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1559 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#780 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1135 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#602 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#335 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#510 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1559 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#612 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1550 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1556 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#83 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#755 FD: 1 BD: 1 +.+.: (wq_completion)nfc46_nci_tx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1332 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1532 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1069 FD: 25 BD: 1 +.+.: (wq_completion)nfc38_nci_tx_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1076 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#972 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1024 ->&rq->__lock FD: 1 BD: 1 ....: elock-AF_INET FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#343 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#208 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1095 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#944 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1331 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#604 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#212 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#603 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#600 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#610 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1330 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1330 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#206 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#591 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#139 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#324 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1253 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1433 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_rx_wq#1329 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1058 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#602 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#349 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1328 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#598 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc3_nci_rx_wq#601 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#608 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1328 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#607 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1327 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1326 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1326 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1325 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#210 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#596 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1382 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1325 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1325 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1324 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1324 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#595 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#598 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1322 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1322 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#594 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#597 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1322 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#593 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#204 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1320 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#209 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1320 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#592 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#602 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1318 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1318 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#594 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1318 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#185 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#590 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1317 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1280 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1316 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#593 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#600 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#45 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#45 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#49 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#202 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#204 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1315 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1314 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#592 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#588 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1314 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1313 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#44 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#44 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#449 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1312 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1312 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#201 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#597 FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#24 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#24 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1195 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1569 FD: 25 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#1570 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1380 FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg0#66 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1163 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#494 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1041 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#519 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1451 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1584 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1607 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1014 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#777 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#182 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#334 ->&rq->__lock FD: 1 BD: 93 +.+.: deferred_probe_mutex.wait_lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1025 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#173 ->&rq->__lock FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg2#70 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#330 FD: 84 BD: 48 +.+.: (wq_completion)wg-kex-wg1#154 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 1 BD: 114 +.+.: &pa->pa_lock#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1388 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1187 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1140 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#383 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#389 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#884 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#388 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#982 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1111 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#970 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#456 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#4 FD: 25 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#4 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1019 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#403 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#402 FD: 1 BD: 5 +.+.: (wq_completion)tipc_send#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1579 FD: 25 BD: 51 +.+.: (wq_completion)phy135 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#460 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#896 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#206 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#310 FD: 25 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#1571 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1558 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1311 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1311 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#586 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1310 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#202 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#205 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#585 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#43 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#43 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1310 FD: 25 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#22 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#42 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1555 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#587 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#42 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#42 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#200 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#583 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1308 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#586 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#203 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1307 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#199 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#582 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#585 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#279 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#742 ->&rq->__lock FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg1#130 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1430 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1305 FD: 26 BD: 1 +.+.: &sfilter->notify_lock ->&rq->__lock ->&obj_hash[i].lock ->&meta->lock ->kfence_freelist_lock ->pool_lock#2 FD: 25 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#14 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1305 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1304 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1304 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1303 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#581 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#584 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#505 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1086 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1303 ->&rq->__lock FD: 1 BD: 49 ..-.: &list->lock#20 FD: 1 BD: 1 +.-.: x25_list_lock FD: 1 BD: 1 +.-.: x25_forward_list_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1082 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#674 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#701 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#201 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1303 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#580 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#583 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#579 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#41 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc3_nci_cmd_wq#589 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#45 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1301 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#195 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#197 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1300 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#194 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#578 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#581 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#199 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1299 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1298 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1298 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1297 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#577 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1297 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1296 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_rx_wq#1295 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_tx_wq#1296 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#576 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#579 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#575 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#584 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#193 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#195 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1294 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#574 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#194 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1293 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1292 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#576 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1292 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1292 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1291 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#580 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#191 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#193 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1290 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#579 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1290 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#571 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1290 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1289 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#570 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1288 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#569 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#571 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1288 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1287 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#570 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1285 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1284 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1284 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#567 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#566 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#568 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1282 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1281 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1281 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#565 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#567 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc4_nci_rx_wq#192 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#190 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1279 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#195 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1279 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#191 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#194 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1278 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#565 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#562 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1276 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1274 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1274 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#561 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#563 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#569 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1272 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1271 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#562 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1271 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#40 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#40 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#567 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1270 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#23 FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#23 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#23 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#188 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1268 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#558 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#560 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1268 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1267 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#557 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#559 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#39 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#22 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#39 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#22 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#43 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#22 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#187 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#558 FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg0#141 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1264 FD: 160 BD: 1 +.+.: (wq_completion)hci4#6 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) FD: 1 BD: 5 +.+.: (wq_completion)tipc_crypto#8 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1179 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1264 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1263 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1263 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#556 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#561 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1262 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#552 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#554 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#560 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#186 FD: 1 BD: 2 
+.+.: (wq_completion)nfc4_nci_cmd_wq#191 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#127 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#190 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1551 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1260 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1260 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1259 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#558 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#267 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_rx_wq#552 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1258 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#186 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#189 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#549 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1256 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1256 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1255 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1254 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#550 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1254 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1254 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1253 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1252 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#4 ->&rq->__lock FD: 707 BD: 13 +.+.: &devlink->lock_key#83 ->crngs.lock ->fs_reclaim ->devlinks.xa_lock ->&c->lock ->&xa->xa_lock#19 ->&n->list_lock ->pcpu_alloc_mutex ->&obj_hash[i].lock ->&base->lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->batched_entropy_u32.lock ->rtnl_mutex ->rtnl_mutex.wait_lock ->&p->pi_lock ->rcu_node_0 ->&rq->__lock ->&(&fn_net->fib_chain)->lock ->stack_depot_init_mutex ->&____s->seqcount#2 ->&____s->seqcount ->&devlink_port->type_lock ->&nsim_trap_data->trap_lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#253 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1061 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#951 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1436 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1252 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1250 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#183 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#188 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1250 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1249 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1249 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#38 FD: 9 BD: 51 +...: slock-AF_INET6/1 ->&sctp_ep_hashtable[i].lock ->&obj_hash[i].lock ->pool_lock#2 ->clock-AF_INET6 ->&____s->seqcount ->key#25 FD: 35 BD: 2 +.+.: sk_lock-AF_INET6/1 ->slock-AF_INET6 ->rlock-AF_INET6 ->&list->lock#17 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1248 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#182 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#184 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#187 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1248 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1247 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1247 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#544 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#552 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1246 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#181 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#186 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1246 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#1245 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#543 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#545 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#551 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1244 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#180 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#542 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#550 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1244 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1243 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1243 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#179 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#181 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#549 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#184 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#541 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1243 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#540 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#183 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#541 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#539 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1242 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1241 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1240 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1240 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#540 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#182 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#179 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1238 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#546 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1237 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#539 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1237 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#21 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#545 FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#21 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1236 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#36 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#178 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#40 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#181 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1235 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1235 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#38 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#35 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#35 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1234 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#536 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1233 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#544 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1233 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#177 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#180 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#535 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#537 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#543 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#34 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1232 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1231 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1231 ->&rq->__lock 
FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#174 ->&rq->__lock FD: 34 BD: 3 +.+.: &q->blkcg_mutex ->(&sq->pending_timer) ->&obj_hash[i].lock ->&base->lock ->percpu_counters_lock ->pcpu_lock ->pool_lock#2 ->&q->queue_lock ->&rq->__lock ->pool_lock FD: 1 BD: 1 ....: (&lo->timer) FD: 1 BD: 1 ....: &lo->lo_work_lock FD: 1 BD: 4 ....: (&sq->pending_timer) FD: 1 BD: 1 ....: (&bdi->laptop_mode_wb_timer) FD: 1 BD: 1 ....: &tags->lock FD: 25 BD: 1 +.+.: (wq_completion)kintegrityd ->&rq->__lock FD: 37 BD: 2 +.+.: (work_completion)(&blkg->free_work) ->&q->blkcg_mutex ->&obj_hash[i].lock ->pool_lock#2 ->&xa->xa_lock#11 ->pcpu_lock ->blk_queue_ida.xa_lock ->percpu_ref_switch_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#176 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#179 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#178 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1231 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1230 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#33 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#33 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#172 FD: 1 BD: 1 ....: _rs.lock#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1230 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1229 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#533 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#535 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1229 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#532 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1227 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#534 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1227 FD: 1 BD: 53 ....: key#25 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1227 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#171 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#173 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#530 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#532 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1224 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#529 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#32 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1224 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1223 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1223 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#170 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#536 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#175 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1222 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1221 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#33 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_tx_wq#31 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1221 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#527 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1220 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#535 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#526 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#534 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1220 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#533 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1218 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1218 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#524 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#526 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#167 FD: 25 BD: 1 +.+.: 
(wq_completion)nfc3_nci_tx_wq#523 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#104 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#371 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#171 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1217 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1216 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#521 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#523 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1216 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#165 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#167 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1214 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1213 FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg0#82 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg2#81 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg2#161 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1212 ->&rq->__lock FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg2#160 ->&rq->__lock ->(work_completion)(&peer->transmit_handshake_work) FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg1#162 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg0#164 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1211 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#519 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#527 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1211 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1210 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#518 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1210 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1209 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#30 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#517 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#32 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#169 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1208 FD: 
25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1208 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1206 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#518 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#524 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#515 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#517 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#523 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1205 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1204 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#516 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#522 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1203 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1202 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#29 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#31 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#165 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#168 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#521 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#512 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1202 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1201 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#509 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#511 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#164 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#167 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#28 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#30 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#161 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#163 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#166 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#505 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#159 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#504 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#507 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#506 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#27 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1589 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#903 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#908 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1093 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1066 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1076 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#158 ->&rq->__lock FD: 25 BD: 1 +.+.: (work_completion)(&tty->hangup_work) ->&rq->__lock FD: 29 BD: 6 +.+.: &o_tty->termios_rwsem/1 ->vmap_area_lock ->&obj_hash[i].lock ->purge_vmap_area_lock ->pool_lock#2 ->&rq->__lock FD: 262 BD: 4 +.+.: &tty->legacy_mutex/1 ->tasklist_lock ->&tty->files_lock ->&tty->write_wait ->&tty->read_wait ->&tty->ldisc_sem ->&tty->ctrl.lock ->&obj_hash[i].lock ->&rq->__lock ->pool_lock#2 ->&f->f_lock FD: 31 BD: 2 +.+.: (work_completion)(&tty->hangup_work)#2 ->(work_completion)(&buf->work) ->&obj_hash[i].lock ->pool_lock#2 ->&tty->files_lock ->stock_lock ->&meta->lock ->kfence_freelist_lock ->quarantine_lock ->&rq->__lock FD: 9 BD: 5 ....: &xa->xa_lock#21 ->&c->lock ->pool_lock#2 ->&obj_hash[i].lock FD: 50 BD: 4 +.+.: devpts_mutex ->&xa->xa_lock#21 ->&dentry->d_lock ->&fsnotify_mark_srcu ->&sb->s_type->i_lock_key#25 ->&s->s_inode_list_lock ->&xa->xa_lock#9 ->&obj_hash[i].lock ->pool_lock#2 ->&rq->__lock ->rcu_node_0 ->cdev_lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#160 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#503 FD: 25 BD: 1 +.+.: 
(wq_completion)nfc6_nci_tx_wq#19 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#19 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#161 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1198 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1197 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1197 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#26 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#27 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1196 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1419 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1196 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#501 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1196 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#508 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1195 FD: 25 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#3 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#506 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#947 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1057 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#7 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#499 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#501 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#500 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1192 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1191 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#497 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#499 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1191 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1190 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#156 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1189 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1188 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#495 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#497 FD: 30 BD: 1 +.-.: (&pool->idle_timer) ->&pool->lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1188 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#494 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1186 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1185 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#153 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#492 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#494 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#493 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#499 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#492 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1179 FD: 1 BD: 5 +.+.: (wq_completion)tipc_send#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#489 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#491 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1178 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1177 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#18 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#152 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#154 FD: 25 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#17 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1176 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#490 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#25 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1176 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1175 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1174 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#489 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1174 ->&rq->__lock FD: 
1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#487 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#150 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#486 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#487 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1173 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1172 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#484 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#486 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#492 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#483 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#491 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1171 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1170 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1438 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1570 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#718 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#372 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#367 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#149 FD: 25 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#8 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#9 FD: 25 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 1 ....: &tun->readq FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#151 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1577 FD: 25 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#6 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#507 FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg1#65 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1591 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1596 FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg2#129 ->(work_completion)(&peer->transmit_handshake_work) FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1604 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#154 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#483 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#489 FD: 160 BD: 1 +.+.: (wq_completion)hci0#12 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) FD: 172 BD: 1 +.+.: (wq_completion)hci0#11 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1168 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1168 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1166 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1166 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#153 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1164 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#482 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1163 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1162 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#479 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1162 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1161 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_tx_wq#1161 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1543 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#486 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1160 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1159 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1159 FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg0#131 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#477 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1159 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1158 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#476 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#478 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1158 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1156 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#475 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1156 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#474 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#476 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1155 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1154 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#473 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#475 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#481 FD: 30 BD: 1 ..-.: &(&net->ipv6.addr_chk_work)->timer FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1153 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#474 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1152 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1152 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#480 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#471 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1151 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#146 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#148 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#472 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1151 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1150 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1149 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1149 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1148 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1147 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1147 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1146 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#468 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#470 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1145 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#145 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#147 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1145 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1144 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1144 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1143 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1142 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1141 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1141 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#144 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1140 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1140 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#467 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#475 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1139 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1138 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1137 FD: 1 BD: 
2 +.+.: (wq_completion)nfc3_nci_cmd_wq#474 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1136 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1136 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#473 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1135 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#464 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#466 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1134 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#472 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#143 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#145 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1132 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1132 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#463 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1132 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1131 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#462 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1130 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#144 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#461 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1130 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1129 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1129 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#460 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1128 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1127 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1125 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#459 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1125 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#467 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#458 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#460 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#466 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1123 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1123 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1122 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1122 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#457 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#459 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#143 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#456 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1120 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1120 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1119 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1119 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#457 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1118 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1117 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#454 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#456 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1117 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#453 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#455 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1116 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1115 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1114 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1114 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1114 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#452 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#454 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1113 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1112 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_tx_wq#1112 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#453 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#459 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1110 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1110 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#450 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#458 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#449 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1109 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#457 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1108 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1108 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1107 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1108 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1106 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1107 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#140 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#145 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1105 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#447 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1104 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1103 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#446 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#448 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#454 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1102 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#23 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#445 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#139 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#141 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#138 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#453 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1102 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#140 ->&rq->__lock FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg2#80 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg0#81 ->&rq->__lock ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#444 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#446 FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg2#159 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg2#158 ->(work_completion)(&peer->transmit_handshake_work) FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg1#159 ->(work_completion)(&peer->transmit_handshake_work) FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#452 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1100 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1099 FD: 1 BD: 
1 +.+.: (wq_completion)nfc3_nci_tx_wq#443 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#451 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1099 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#139 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#142 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1098 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1097 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#444 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#135 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#137 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#443 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1096 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1095 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#440 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#442 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#448 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1094 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1092 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#439 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#441 FD: 29 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#447 ->&rq->__lock ->stock_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->pool_lock ->percpu_counters_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1092 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1091 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#438 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1090 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1089 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#437 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1088 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1087 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1087 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#135 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#436 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#438 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1086 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#435 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#437 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1085 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1084 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1083 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1083 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1083 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1082 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#134 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#137 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1082 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1081 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1081 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1080 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#433 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#441 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1080 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1079 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#133 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#434 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#440 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1077 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#431 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#439 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1075 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#432 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#438 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1074 ->&rq->__lock FD: 1 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#1074 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1072 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#431 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#437 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1071 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1070 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#7 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1069 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1068 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#427 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#429 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#22 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#22 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#16 FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#16 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#15 FD: 25 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#16 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#9 FD: 25 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 51 +.+.: (wq_completion)phy164 FD: 25 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#7 FD: 25 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#6 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#6 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc35_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_rx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc36_nci_cmd_wq#6 FD: 1 BD: 3 +.+.: resource_mutex FD: 55 BD: 2 +.+.: sk_lock-AF_PHONET ->slock-AF_PHONET ->&rq->__lock ->&pnsocks.lock ->resource_mutex ->&obj_hash[i].lock ->&x->wait#3 ->stock_lock ->key ->pcpu_lock ->percpu_counters_lock ->pool_lock#2 ->&cfs_rq->removed.lock ->pool_lock ->port_mutex#2 ->fs_reclaim ->&____s->seqcount ->&c->lock ->&n->list_lock ->&base->lock FD: 1 BD: 2 +...: clock-AF_PHONET FD: 1 BD: 1 +.+.: (wq_completion)nfc38_nci_rx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc39_nci_cmd_wq#3 FD: 25 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#8 FD: 25 BD: 1 +.+.: 
(wq_completion)nfc32_nci_tx_wq#5 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#996 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#329 FD: 25 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#3 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc35_nci_cmd_wq#2 FD: 25 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#9 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#15 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#21 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#21 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#133 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1068 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1067 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1067 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#5 FD: 25 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#7 FD: 25 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#5 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#6 FD: 25 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#5 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#132 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#327 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#6 FD: 25 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#10 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1392 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#120 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1286 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#8 FD: 25 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#8 FD: 25 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#8 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#14 FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#14 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#433 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#400 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#7 FD: 25 BD: 1 +.+.: 
(wq_completion)nfc16_nci_tx_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1066 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1041 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#423 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#431 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#424 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#430 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#128 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#131 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1063 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1063 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1062 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#419 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1062 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#421 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1061 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1057 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1055 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#418 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#420 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1055 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1580 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#510 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#881 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#501 FD: 1 BD: 3 ....: &card->remove_sleep FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1173 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1054 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#419 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#425 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1052 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1052 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1051 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1050 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#125 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#127 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1049 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1048 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#416 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1047 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1047 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1046 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#415 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#417 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#414 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1046 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1045 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1044 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1044 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#421 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1044 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1043 FD: 25 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#10 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#588 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1153 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1059 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1043 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1049 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1042 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1042 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1042 ->&rq->__lock FD: 
1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1041 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#418 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#409 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#411 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#417 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#410 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1037 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1476 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#112 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#242 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1475 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#408 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#414 FD: 1 BD: 2 ....: &rmidi->open_wait FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#519 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#129 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_rx_wq#126 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1034 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1033 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1019 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1149 FD: 58 BD: 1 +.+.: &rmidi->open_mutex ->fs_reclaim ->pool_lock#2 ->&card->files_lock ->&rmidi->open_wait ->&card->ctl_files_rwlock ->&obj_hash[i].lock ->&c->lock ->&n->list_lock ->&rdev->filelist_sem ->&substream->lock ->(work_completion)(&runtime->event_work) ->quarantine_lock ->&____s->seqcount#2 ->&____s->seqcount ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#411 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1017 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#124 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#402 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#410 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1016 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1015 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1013 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1012 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1011 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1009 FD: 1 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#401 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1040 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1374 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1008 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#124 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#127 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#408 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1006 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1005 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1004 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#123 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1004 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1003 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1002 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#398 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#406 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#405 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1000 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#998 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#997 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#19 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#121 ->&rq->__lock FD: 1 BD: 2 +.+.: 
(wq_completion)nfc4_nci_cmd_wq#267 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#445 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1033 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#880 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#617 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1146 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#19 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1497 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#396 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#125 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#997 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#397 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#996 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#121 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#994 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#994 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#119 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1101 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1281 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#993 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#394 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#993 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#393 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#392 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#394 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#992 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#991 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#391 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#990 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#392 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#398 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#990 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#989 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#118 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#391 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#117 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#118 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#988 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#987 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#388 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#987 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#986 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#387 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#986 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#985 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#984 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#386 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#388 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#983 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#983 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#116 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#117 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#120 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#385 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#115 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#384 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#386 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#392 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#982 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#115 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#980 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc3_nci_rx_wq#385 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#391 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#979 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#979 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#113 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#114 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#978 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#382 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#390 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#383 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#977 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#381 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#389 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#113 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#975 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#116 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#380 ->&rq->__lock FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg2#138 ->(work_completion)(&peer->transmit_handshake_work) FD: 25 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1488 FD: 1 BD: 2 +.+.: (wq_completion)nfc36_nci_cmd_wq#10 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#498 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#974 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#379 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#381 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#973 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_rx_wq#973 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#112 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#111 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#972 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#974 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#380 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#973 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#385 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#16 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#114 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#971 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#969 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#378 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#384 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#109 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#113 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#970 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#112 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#377 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#969 FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#18 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#965 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#375 FD: 1 BD: 3 +...: clock-AF_QIPCRTR FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#964 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#964 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#962 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#963 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#108 FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg2#65 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ 
unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#292 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#766 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#960 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#961 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#958 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#959 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#957 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#957 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#105 FD: 1 BD: 1 +.+.: &resv_map->lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#109 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#370 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#372 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#377 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#6 FD: 25 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_rx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc35_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_rx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc36_nci_cmd_wq#5 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1394 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc37_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc38_nci_rx_wq#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc38_nci_cmd_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc39_nci_tx_wq#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc39_nci_cmd_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#6 FD: 25 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1094 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#741 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#369 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#954 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#954 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#4 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc26_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#609 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1201 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#5 FD: 25 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#5 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#10 FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#10 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#5 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#103 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#107 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#244 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#367 ->&rq->__lock FD: 5 BD: 2 +.+.: (ima_keys_delayed_work).work ->ima_keys_lock ->&obj_hash[i].lock ->pool_lock#2 FD: 30 BD: 1 ..-.: security/integrity/ima/ima_queue_keys.c:35 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#953 FD: 1 BD: 1 +.+.: (wq_completion)nfc37_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#950 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#365 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#951 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#102 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#105 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#362 ->&rq->__lock FD: 12 BD: 53 +...: &sctp_port_hashtable[i].lock ->&____s->seqcount#2 ->&____s->seqcount ->&c->lock ->pool_lock#2 ->&obj_hash[i].lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 1 BD: 4 ....: qrtr_nodes_lock FD: 25 BD: 1 +.+.: &node->ep_lock ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#364 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#949 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#361 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#363 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#948 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#946 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#360 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#362 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#367 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#946 ->&rq->__lock FD: 1 
BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#944 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#221 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1380 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1349 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1382 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#11 FD: 25 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#361 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#945 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#360 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#944 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#942 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#942 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#359 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#943 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#100 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#355 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#363 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#942 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#357 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#362 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#99 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#940 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#354 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#98 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#360 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#938 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#939 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#937 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#354 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#359 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#938 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#936 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#937 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#936 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#934 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#97 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#351 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#933 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#353 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#358 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#107 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#148 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#97 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#357 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#934 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#912 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#911 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#379 FD: 1 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#3 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1597 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#881 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1466 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1080 FD: 25 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#1499 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1553 FD: 1 BD: 3 +...: slock-AF_PPPOX FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1511 ->&rq->__lock FD: 172 BD: 1 +.+.: (wq_completion)hci0#9 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#14 FD: 25 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#5 ->&rq->__lock FD: 6 BD: 1 +.+.: put_task_map-wait-type-override#3 ->&obj_hash[i].lock ->pool_lock#2 ->stock_lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1600 ->&rq->__lock FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg0#142 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#931 FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#95 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#96 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#99 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#351 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#356 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#9 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#9 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#930 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#929 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#930 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#348 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#350 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#715 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#16 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1098 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#698 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1389 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#355 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#926 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#927 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#925 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#925 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#349 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#354 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#923 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#924 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#921 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#923 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#94 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#921 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#346 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#98 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#348 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#920 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#920 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#345 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#920 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#351 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#916 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#918 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#917 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#915 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#915 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#915 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#914 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#912 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#93 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#94 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#342 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#344 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#913 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#912 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#910 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#911 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#910 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#909 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#908 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#340 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#907 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#906 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#129 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1504 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#904 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#904 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#903 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#339 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#345 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#902 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#338 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#344 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#902 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#900 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#898 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#338 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#899 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#336 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#337 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1582 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#342 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#898 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1190 FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#8 FD: 172 BD: 1 +.+.: (wq_completion)hci1#3 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 85 BD: 48 +.+.: (wq_completion)wg-crypt-wg2#77 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#168 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#513 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1155 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1086 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#895 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#336 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#341 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#896 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#895 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#497 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#340 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#894 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_rx_wq#93 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#96 FD: 25 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#891 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#892 FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#7 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#334 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#890 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#891 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#91 FD: 1 BD: 1 +.+.: &ctx->vb_mutex FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#890 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#333 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#338 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#888 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#887 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#885 ->&rq->__lock FD: 85 BD: 48 +.+.: (wq_completion)wg-crypt-wg2#79 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 84 BD: 48 +.+.: (wq_completion)wg-kex-wg1#158 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 52 BD: 48 +.+.: (wq_completion)wg-kex-wg2#157 ->(work_completion)(&peer->transmit_handshake_work) FD: 84 BD: 48 +.+.: (wq_completion)wg-kex-wg0#160 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 52 BD: 48 +.+.: (wq_completion)wg-kex-wg1#157 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#331 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#119 ->&rq->__lock FD: 1 BD: 1 ....: _rs.lock#3 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#977 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#982 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1304 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#382 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#953 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#5 FD: 25 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#6 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#6 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#106 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#366 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#5 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1424 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#33 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1479 FD: 25 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#12 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#368 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#556 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#11 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#940 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#356 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#102 ->&rq->__lock FD: 1 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#936 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#935 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#934 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#10 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#10 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#932 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#931 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#928 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#928 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#924 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#95 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#918 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#914 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#911 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#906 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#905 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#346 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#897 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#895 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#890 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#886 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#90 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#326 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#331 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1601 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1597 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#772 FD: 36 BD: 1 +.+.: (wq_completion)bond0#84 ->&rq->__lock ->(work_completion)(&(&slave->notify_work)->work) FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#767 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#767 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1524 FD: 25 BD: 2 +.+.: (wq_completion)nfc39_nci_cmd_wq#5 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc49_nci_cmd_wq#2 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#12 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#729 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1487 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#275 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#700 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#680 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1407 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1398 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#659 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1389 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#646 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1380 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#643 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#236 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1381 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#641 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#233 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#235 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#234 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1389 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1373 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1368 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1361 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1360 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1355 ->&rq->__lock FD: 25 BD: 2 +.+.: 
(wq_completion)nfc3_nci_cmd_wq#640 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#623 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1345 FD: 25 BD: 1 +.+.: (wq_completion)nfc35_nci_tx_wq#5 ->&rq->__lock ->&cfs_rq->removed.lock FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#12 FD: 52 BD: 48 +.+.: (wq_completion)wg-kex-wg1#155 ->(work_completion)(&peer->transmit_handshake_work) FD: 52 BD: 48 +.+.: (wq_completion)wg-kex-wg2#154 ->(work_completion)(&peer->transmit_handshake_work) FD: 52 BD: 48 +.+.: (wq_completion)wg-kex-wg0#157 ->(work_completion)(&peer->transmit_handshake_work) ->&rq->__lock FD: 85 BD: 48 +.+.: (wq_completion)wg-crypt-wg2#78 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 84 BD: 48 +.+.: (wq_completion)wg-kex-wg2#155 ->&rq->__lock ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#213 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#213 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#614 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1324 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1313 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#203 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#48 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#46 FD: 52 BD: 48 +.+.: (wq_completion)wg-kex-wg1#153 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1339 FD: 52 BD: 48 +.+.: (wq_completion)wg-kex-wg2#152 ->(work_completion)(&peer->transmit_handshake_work) FD: 25 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#6 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#6 ->&rq->__lock FD: 84 BD: 48 +.+.: (wq_completion)wg-kex-wg1#156 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1342 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#637 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1358 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#228 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#631 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#634 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1387 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1376 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#233 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1378 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#234 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#639 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1385 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_tx_wq#1385 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1384 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#241 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#14 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#14 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#671 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1413 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1418 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1423 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#672 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc50_nci_tx_wq FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1427 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#14 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1309 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#204 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#596 ->&rq->__lock FD: 36 BD: 5 +.+.: (wq_completion)bond0#80 ->&rq->__lock ->(work_completion)(&(&slave->notify_work)->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1442 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1441 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1448 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#703 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1464 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1469 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1471 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1089 FD: 25 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#15 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#24 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1481 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1486 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1490 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1494 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#728 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1493 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#16 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#774 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1574 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1551 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1571 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1599 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#776 FD: 1 BD: 95 +.+.: wq_pool_attach_mutex.wait_lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1599 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1600 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1612 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1623 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#879 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#882 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#324 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#325 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1425 ->&rq->__lock FD: 2 BD: 90 +.+.: (work_completion)(flush) ->&list->lock#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#328 FD: 1 BD: 3 +.+.: chan_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#94 FD: 25 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#14 
->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1391 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#881 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#884 FD: 85 BD: 48 +.+.: (wq_completion)wg-crypt-wg1#79 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#888 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#95 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#889 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#424 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1307 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#907 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#906 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#97 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#916 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#919 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#200 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#922 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#353 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1295 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#929 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#927 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#484 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#100 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#946 FD: 1 BD: 4 ....: local_port_range_lock.seqcount FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#948 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#5 FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg0#149 ->(work_completion)(&peer->transmit_handshake_work) FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg1#147 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg0#150 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 85 BD: 1 +.+.: (wq_completion)wg-crypt-wg1#74 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg1#148 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc36_nci_cmd_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#968 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#583 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#969 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1277 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1294 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#394 
FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#395 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#704 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#958 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1002 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#505 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1553 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1186 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#990 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#390 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#400 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1001 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1005 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1005 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#409 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1011 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1015 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1012 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1269 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1016 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1016 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1018 ->&rq->__lock FD: 1 BD: 96 +.+.: rcu_state.exp_wake_mutex.wait_lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1022 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1023 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1022 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1023 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#427 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#670 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#421 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1063 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1253 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1259 FD: 1 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#5 FD: 25 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#16 ->&rq->__lock FD: 28 BD: 3 +.+.: port_mutex#2 ->&rq->__lock ->local_port_range_lock.seqcount ->&pnsocks.lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#425 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#637 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#42 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#185 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#37 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1069 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1070 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1073 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#430 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1078 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1078 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1237 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1236 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1241 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1219 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#525 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1205 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#512 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#165 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1195 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1184 FD: 25 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#1182 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1171 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1169 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1153 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1138 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#150 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1135 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1128 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1075 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#978 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1056 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1059 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1056 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1051 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1106 FD: 25 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#13 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#5 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1120 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1059 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#469 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1047 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1049 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1051 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1058 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_rx_wq#414 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#490 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1181 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#495 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#436 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1085 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#444 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#347 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1088 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1039 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#415 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1038 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1038 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1031 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#138 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#596 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1480 FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg0#161 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1101 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#144 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1103 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1104 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1111 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1115 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1119 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#141 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1121 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1123 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1027 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1130 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#465 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1133 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1137 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1141 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1150 ->&rq->__lock FD: 1 BD: 
1 +.+.: (wq_completion)nfc4_nci_tx_wq#147 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1157 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#484 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1158 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#488 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1081 FD: 1 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1518 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#514 ->&rq->__lock FD: 1 BD: 2 +.+.: (work_completion)(&msk->work) FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#741 FD: 1 BD: 48 ....: &list->lock#22 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#331 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#976 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#61 ->&rq->__lock FD: 1 BD: 1 ....: &ps->lock FD: 1 BD: 48 +.+.: __ip_vs_mutex.wait_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#133 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#79 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1145 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#10 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#901 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#540 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#737 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1171 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#691 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#520 FD: 25 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#20 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#59 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#69 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#89 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#512 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1198 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1199 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1203 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1205 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1206 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1207 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1210 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#24 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1468 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#124 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1189 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1020 FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg1#161 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1211 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1216 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#532 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#170 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1219 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#174 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1221 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1224 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#533 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#539 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#177 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1229 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#36 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#34 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1235 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1238 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#537 FD: 1 BD: 1 
+.+.: (wq_completion)nfc2_nci_tx_wq#1239 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1239 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1246 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1247 ->&rq->__lock FD: 172 BD: 1 +.+.: (wq_completion)hci5#3 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#190 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#193 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#560 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1273 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#752 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_rx_wq#341 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1375 FD: 25 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1280 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#3 FD: 84 BD: 1 +.+.: (wq_completion)wg-kex-wg1#140 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) FD: 25 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#12 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1278 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1275 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#197 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1293 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1301 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1302 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#601 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1319 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1319 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#604 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#605 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#207 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1326 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#600 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#638 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#230 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#232 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#237 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1384 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1390 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#650 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#121 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#327 FD: 1 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1040 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1506 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1064 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#256 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#901 FD: 36 BD: 5 +.+.: (wq_completion)bond0#79 ->(work_completion)(&(&slave->notify_work)->work) FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#879 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#235 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1092 ->&rq->__lock FD: 160 BD: 12 +.+.: (wq_completion)hci1#2 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) 
->(work_completion)(&(&hdev->cmd_timer)->work) ->(work_completion)(&(&conn->disc_work)->work) FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#74 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1440 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#245 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#15 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1381 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1200 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1233 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#159 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1178 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1188 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#646 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#482 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#233 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1031 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1122 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1110 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1111 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#976 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#382 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#885 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#323 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#314 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#999 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#997 FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#35 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1616 ->&rq->__lock FD: 1 BD: 51 +.+.: (wq_completion)phy136 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1600 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1592 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1586 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1577 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1574 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1576 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1568 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#782 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1557 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#91 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1547 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1546 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#304 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#756 FD: 25 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#17 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#36 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#21 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#290 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1509 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1482 FD: 25 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#16 ->&rq->__lock FD: 702 BD: 2 +.+.: (reg_check_chans).work ->rtnl_mutex FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1021 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1449 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#263 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1448 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1405 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1382 ->&rq->__lock 
->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#642 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1350 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#515 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#506 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_tx_wq#496 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock ->pool_lock#2 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#642 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#640 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#152 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1143 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1283 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#572 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#730 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#511 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1276 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1129 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#271 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#508 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#472 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#163 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1162 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#464 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#502 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#406 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1029 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#455 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1185 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#17 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#899 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1102 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1034 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1090 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#905 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#172 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#531 FD: 172 BD: 1 +.+.: (wq_completion)hci4#5 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#426 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#981 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1066 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1089 FD: 41 BD: 1 .+.+: sb_writers#12 ->mount_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#476 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1178 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#272 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#164 ->&rq->__lock FD: 1 BD: 3 +...: clock-AF_PPPOX FD: 31 BD: 2 +.+.: sk_lock-AF_PPPOX ->&rq->__lock ->slock-AF_PPPOX ->chan_lock ->&obj_hash[i].lock ->&x->wait#3 ->clock-AF_PPPOX ->&pn->hash_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#513 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1201 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#530 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#529 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1077 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1071 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#430 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#554 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#434 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc3_nci_rx_wq#547 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#6 FD: 135 BD: 48 +.+.: team->team_lock_key#82 ->&rq->__lock ->fs_reclaim ->netpoll_srcu ->net_rwsem ->&tn->lock ->_xmit_ETHER ->&dir->lock#2 ->input_pool.lock ->&c->lock ->&ndev->lock ->&obj_hash[i].lock ->nl_table_lock ->nl_table_wait.lock ->&in_dev->mc_tomb_lock ->&im->lock ->cbs_list_lock ->sysfs_symlink_target_lock ->lock ->&root->kernfs_rwsem ->&n->list_lock ->lweventlist_lock ->(console_sem).lock ->&sem->wait_lock ->&p->pi_lock ->&cfs_rq->removed.lock ->pool_lock#2 ->&____s->seqcount#2 ->&____s->seqcount FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#423 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1262 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#578 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1286 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#949 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#352 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#887 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#380 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#644 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#329 FD: 160 BD: 1 +.+.: (wq_completion)hci2#9 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) ->(work_completion)(&(&conn->disc_work)->work) FD: 1 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#587 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#914 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#916 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1306 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#900 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#894 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#339 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#889 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1583 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1200 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#630 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#950 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1128 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#230 FD: 85 BD: 48 +.+.: (wq_completion)wg-crypt-wg1#78 ->(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) ->(work_completion)(&peer->transmit_packet_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#8 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1327 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1308 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1321 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1334 FD: 25 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#12 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#8 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1345 ->&rq->__lock FD: 1 BD: 2 +.+.: (work_completion)(&runtime->event_work) FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#231 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1349 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1363 ->&rq->__lock FD: 25 BD: 1 +.+.: 
(wq_completion)nfc4_nci_tx_wq#232 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#642 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#653 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1396 FD: 25 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#14 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1413 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#673 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#248 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#663 FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_rx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#13 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#162 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#12 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#681 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#254 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#683 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc40_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#682 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1447 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1456 FD: 25 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#16 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#11 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1482 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1484 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#429 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#985 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1545 ->&rq->__lock FD: 160 BD: 1 +.+.: (wq_completion)hci1#4 ->(work_completion)(&hdev->cmd_work) ->(work_completion)(&hdev->rx_work) ->(work_completion)(&hdev->tx_work) ->(work_completion)(&conn->pending_rx_work) ->(work_completion)(&(&hdev->cmd_timer)->work) FD: 707 BD: 13 +.+.: &devlink->lock_key#84 ->crngs.lock ->fs_reclaim ->devlinks.xa_lock ->&xa->xa_lock#19 ->&c->lock ->&n->list_lock ->pcpu_alloc_mutex ->&obj_hash[i].lock ->&base->lock ->pin_fs_lock ->&sb->s_type->i_mutex_key#3 ->batched_entropy_u32.lock ->rtnl_mutex ->&rq->__lock ->&(&fn_net->fib_chain)->lock ->stack_depot_init_mutex ->remove_cache_srcu ->&____s->seqcount#2 ->&____s->seqcount ->rtnl_mutex.wait_lock ->&p->pi_lock ->&devlink_port->type_lock ->&nsim_trap_data->trap_lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#884 FD: 52 BD: 48 +.+.: (wq_completion)wg-kex-wg0#159 ->(work_completion)(&peer->transmit_handshake_work) FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#334 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#893 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1067 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1308 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#933 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#935 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#941 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#361 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#967 FD: 1 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#956 FD: 1 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#4 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#963 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1093 ->&rq->__lock FD: 3 BD: 2 +.+.: &card->files_lock ->shutdown_lock ->&card->remove_sleep FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#883 FD: 52 BD: 1 +.+.: (wq_completion)wg-kex-wg1#139 ->(work_completion)(&peer->transmit_handshake_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#479 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#330 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#90 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#326 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#3 FD: 1 BD: 197 +.+.: iattr_mutex.wait_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#3 FD: 1 BD: 2 +.+.: &ping_table.lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#753 FD: 1 BD: 48 ....: &asoc->wait FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#111 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#932 FD: 25 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#3 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#428 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#3 FD: 25 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#3 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#641 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#4 FD: 25 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#3 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#4 FD: 25 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#4 FD: 25 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#4 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#4 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#878 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#878 ->&rq->__lock FD: 1 BD: 2 +.+.: br_ioctl_mutex.wait_lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1509 ->&rq->__lock FD: 30 BD: 1 ..-.: net/wireless/reg.c:236 FD: 172 BD: 1 +.+.: (wq_completion)hci1 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1623 FD: 26 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1622 ->rcu_node_0 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#792 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#11 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1376 ->&rq->__lock FD: 172 BD: 1 +.+.: (wq_completion)hci2#8 ->(work_completion)(&hdev->power_on) ->(work_completion)(&hdev->cmd_sync_work) FD: 1 BD: 5 +.+.: (wq_completion)tipc_send#8 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#759 ->&rq->__lock FD: 36 BD: 1 +.+.: (wq_completion)bond0#83 ->(work_completion)(&(&slave->notify_work)->work) FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1008 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1003 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#988 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#988 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#396 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#996 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#999 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1000 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#122 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1007 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1021 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc2_nci_cmd_wq#1019 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1018 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#18 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1028 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1618 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1617 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#14 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1534 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#980 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1182 FD: 29 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1614 ->&rq->__lock ->stock_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock ->&cfs_rq->removed.lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1612 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1613 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1609 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1609 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1607 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1609 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#778 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1607 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1606 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1601 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#471 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1601 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#653 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1602 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1598 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1596 ->&rq->__lock FD: 135 BD: 48 +.+.: team->team_lock_key#84 ->fs_reclaim ->netpoll_srcu ->net_rwsem ->&tn->lock ->_xmit_ETHER ->&dir->lock#2 ->input_pool.lock ->&c->lock ->&n->list_lock ->&ndev->lock ->&obj_hash[i].lock ->nl_table_lock ->nl_table_wait.lock ->&rq->__lock ->&in_dev->mc_tomb_lock ->&im->lock ->cbs_list_lock ->sysfs_symlink_target_lock ->lock ->&root->kernfs_rwsem ->lweventlist_lock ->(console_sem).lock ->batched_entropy_u8.lock ->kfence_freelist_lock ->&meta->lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1602 FD: 1 BD: 5 +.+.: (wq_completion)tipc_rcv#10 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1595 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1592 ->&rq->__lock FD: 1 BD: 47 ....: key#26 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1590 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1589 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1590 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1588 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1589 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1587 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1588 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1585 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1586 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1585 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1584 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1453 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#891 FD: 1 BD: 3 +.+.: shutdown_lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#141 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc35_nci_cmd_wq#5 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1167 ->&rq->__lock FD: 25 BD: 2 +.+.: 
(wq_completion)nfc3_nci_cmd_wq#493 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1581 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#264 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1583 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1580 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1582 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1579 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1578 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1580 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1578 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1579 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1577 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1576 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1575 FD: 60 BD: 1 .+.+: sb_writers#13 ->mount_lock ->tk_core.seq.seqcount ->&sb->s_type->i_lock_key#27 ->&wb->list_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1571 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1568 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#603 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1335 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1334 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1334 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#215 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#212 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#50 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#46 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#607 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#604 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#46 FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#25 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#25 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1335 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1335 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#615 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1337 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1336 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1336 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#216 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#211 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#605 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1338 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1337 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1337 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1338 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1338 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#606 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#217 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#214 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#212 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1340 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1339 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#51 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#47 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#47 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1341 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1340 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1340 FD: 25 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#10 FD: 25 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc9_nci_tx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#10 FD: 25 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#7 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#8 FD: 25 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#6 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#6 FD: 25 BD: 1 +.+.: (wq_completion)nfc35_nci_rx_wq#5 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc37_nci_cmd_wq#6 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#617 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#610 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc37_nci_rx_wq#6 FD: 25 BD: 1 +.+.: (wq_completion)nfc37_nci_tx_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#607 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1342 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1341 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1341 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_tx_wq#7 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#218 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#215 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#213 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#52 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#48 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#48 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#26 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#26 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#26 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#18 FD: 25 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#10 FD: 25 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#11 FD: 25 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#14 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#14 FD: 25 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#14 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#219 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#216 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#214 FD: 25 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#12 FD: 25 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#11 FD: 25 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#10 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#10 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#8 ->&rq->__lock FD: 1 BD: 1 
+.+.: (wq_completion)nfc15_nci_rx_wq#8 FD: 25 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#8 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#53 FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#8 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#49 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#8 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#12 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#49 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#6 FD: 25 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#19 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#7 FD: 25 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#9 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#9 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#8 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#8 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#6 FD: 25 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#6 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#7 FD: 25 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#7 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#8 FD: 25 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#8 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#8 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#618 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#611 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#608 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1343 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1342 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#27 FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#27 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#27 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#220 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#217 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#215 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#15 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#54 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#50 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#50 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc7_nci_cmd_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#19 FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#13 FD: 25 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#13 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#12 FD: 25 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#12 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#11 FD: 25 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#11 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#13 FD: 25 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#13 FD: 25 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#12 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#9 FD: 25 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#9 FD: 25 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#10 FD: 25 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#10 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#13 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#12 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#9 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#9 FD: 25 BD: 2 +.+.: (wq_completion)nfc38_nci_cmd_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc38_nci_rx_wq#4 FD: 25 BD: 1 +.+.: (wq_completion)nfc38_nci_tx_wq#4 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#7 FD: 25 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#7 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#6 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#6 FD: 25 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#6 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#7 FD: 25 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#11 FD: 25 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#11 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#8 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#8 FD: 25 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#8 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#16 FD: 25 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#14 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#21 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#20 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#55 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#51 ->&rq->__lock FD: 
1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#51 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#28 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#28 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#221 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#218 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#216 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1344 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1343 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1343 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#619 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#612 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#609 FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#10 FD: 25 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1344 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#620 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#613 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1344 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#610 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1345 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1347 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#611 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1346 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1346 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#222 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#219 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#217 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#615 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#612 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#223 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#220 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#218 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1348 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1347 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1347 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#616 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#613 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#224 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#219 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1348 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1348 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#624 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#617 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#614 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1349 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#225 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#222 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#220 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#618 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#615 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#56 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#52 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#52 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1351 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1350 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#626 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#619 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#616 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#627 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#620 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#226 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#223 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc4_nci_tx_wq#221 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_rx_wq#621 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#628 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#57 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#53 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1352 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1351 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1351 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1352 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1352 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#629 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#622 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#618 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1354 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1353 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1353 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1354 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1354 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#623 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#619 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#227 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#224 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#222 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1355 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1355 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#624 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#620 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1357 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#632 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#625 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#621 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1356 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1357 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1357 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#633 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#626 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#622 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#225 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#223 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1359 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1358 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#627 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#623 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1358 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#628 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#624 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1360 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1359 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1359 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1360 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#636 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#629 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#625 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1362 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1361 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1361 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#229 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#630 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#226 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#224 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#626 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1362 FD: 1 BD: 
2 +.+.: (wq_completion)nfc3_nci_cmd_wq#638 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1364 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#627 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1363 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1363 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1364 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1364 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#227 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#225 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1366 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#632 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#628 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1365 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1365 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#228 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#226 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#633 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#629 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1366 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1366 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1367 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1367 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#641 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#630 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1369 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1368 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1368 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1369 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#232 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#229 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#227 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#631 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#58 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#54 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#54 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1371 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1370 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#643 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#636 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#632 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1371 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1371 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#644 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#637 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#633 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#645 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1372 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1372 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#638 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#634 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#230 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#228 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1373 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#639 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1375 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#635 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1374 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1374 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1375 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1377 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_tx_wq#1376 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1377 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1377 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1378 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1378 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#647 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#640 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#636 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#231 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#229 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#239 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#236 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#234 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1391 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1390 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1390 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#654 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#647 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#643 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1391 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#240 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#237 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#235 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#655 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#644 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1393 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1392 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#656 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#649 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#645 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1393 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1393 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#657 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#650 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#646 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#238 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#236 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1395 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#60 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#651 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#55 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#647 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#55 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1394 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1396 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1395 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1395 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#652 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#648 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1397 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1396 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1398 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1397 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1397 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#660 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#649 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1399 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#661 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#654 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1398 ->&rq->__lock FD: 25 
BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#242 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#239 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#237 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1399 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1399 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#662 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#655 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#651 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1401 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1400 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#663 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#656 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#652 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1401 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1401 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1403 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1402 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1402 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#664 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#657 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#653 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#240 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#238 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1404 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1403 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1403 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#665 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#658 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#654 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#241 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#239 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1404 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1404 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#666 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#659 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#655 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1405 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1405 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#667 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#660 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#656 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#240 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#22 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#56 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#21 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#21 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#56 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#13 FD: 25 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#13 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#14 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#15 FD: 25 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#10 FD: 25 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#16 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#16 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc24_nci_rx_wq#7 FD: 25 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#7 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#9 FD: 25 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#12 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1406 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#668 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#661 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#657 FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#29 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#29 FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#29 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#246 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#243 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#241 FD: 25 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#15 FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#17 FD: 25 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#17 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#17 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#62 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#57 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#57 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#23 FD: 25 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#22 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#22 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#12 FD: 25 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#15 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#15 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#15 FD: 25 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#15 FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#12 FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#10 FD: 25 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#11 FD: 25 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#14 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#7 FD: 1 BD: 1 
+.+.: (wq_completion)nfc23_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#17 FD: 1 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#7 FD: 25 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#7 FD: 1 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#9 FD: 25 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#8 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1407 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#669 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#662 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#658 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#670 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1408 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#663 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#659 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1408 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#664 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#660 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1410 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1409 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#672 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#665 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#661 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1410 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1410 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1412 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#666 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1411 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1411 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#662 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1412 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1412 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1414 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1413 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#674 ->&rq->__lock FD: 25 BD: 1 
+.+.: (wq_completion)nfc3_nci_rx_wq#667 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1415 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#675 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1414 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1414 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#668 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#664 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#669 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1416 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#665 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1415 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1415 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#247 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#244 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#242 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1417 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1416 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1416 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#245 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#243 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1417 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#63 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#58 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1417 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#58 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#677 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#249 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#670 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#666 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#246 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#244 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1418 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1418 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#671 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#667 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1420 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1419 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1421 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1420 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1420 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#679 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#668 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1422 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1421 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1421 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#673 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#669 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1422 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1422 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#16 FD: 25 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#16 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#16 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1423 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1424 FD: 25 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#13 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1426 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1425 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1425 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#18 
FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#19 FD: 25 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#11 FD: 25 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#11 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#12 FD: 25 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#8 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_tx_wq#9 FD: 25 BD: 2 +.+.: (wq_completion)nfc21_nci_rx_wq#9 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#8 FD: 25 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#8 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#8 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#250 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#247 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#245 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#64 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#59 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#59 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#30 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#30 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#30 FD: 25 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#23 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#23 FD: 25 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#18 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#18 FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#17 FD: 25 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#17 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#17 FD: 25 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#16 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#16 FD: 25 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#16 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#16 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#13 FD: 25 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1426 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1426 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#21 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#20 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#12 FD: 25 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#12 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#12 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc18_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#13 FD: 25 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#13 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#9 FD: 25 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#9 FD: 25 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#9 FD: 25 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#9 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#8 FD: 25 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc42_nci_cmd_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc42_nci_rx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc42_nci_tx_wq#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#682 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#675 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#671 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#251 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#248 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#246 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#65 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#60 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#60 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#31 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#31 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#31 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#25 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#24 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#24 FD: 25 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#19 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#19 FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#18 FD: 1 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#17 FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#17 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#15 FD: 25 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#15 FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#14 FD: 25 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#14 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1428 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1427 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_tx_wq#1427 FD: 1 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#22 FD: 25 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#21 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#21 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#13 FD: 25 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#13 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#14 FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#15 FD: 1 BD: 2 +.+.: (wq_completion)nfc50_nci_cmd_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc50_nci_rx_wq FD: 1 BD: 2 +.+.: (wq_completion)nfc49_nci_cmd_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc49_nci_rx_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc49_nci_tx_wq FD: 1 BD: 2 +.+.: (wq_completion)nfc48_nci_cmd_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc48_nci_rx_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc48_nci_tx_wq FD: 1 BD: 2 +.+.: (wq_completion)nfc47_nci_cmd_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc47_nci_rx_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc47_nci_tx_wq FD: 1 BD: 2 +.+.: (wq_completion)nfc46_nci_cmd_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc46_nci_rx_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc46_nci_tx_wq FD: 1 BD: 2 +.+.: (wq_completion)nfc45_nci_cmd_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc45_nci_rx_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc45_nci_tx_wq FD: 1 BD: 2 +.+.: (wq_completion)nfc44_nci_cmd_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc44_nci_rx_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc44_nci_tx_wq FD: 1 BD: 2 +.+.: (wq_completion)nfc43_nci_cmd_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc43_nci_rx_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc43_nci_tx_wq FD: 1 BD: 2 +.+.: (wq_completion)nfc41_nci_cmd_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc41_nci_rx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc41_nci_tx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc40_nci_rx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc40_nci_tx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc39_nci_cmd_wq#4 FD: 25 BD: 1 +.+.: (wq_completion)nfc39_nci_rx_wq#4 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc39_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc38_nci_cmd_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc38_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc38_nci_tx_wq#5 FD: 1 BD: 2 +.+.: (wq_completion)nfc37_nci_cmd_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc37_nci_rx_wq#7 FD: 25 BD: 1 +.+.: (wq_completion)nfc37_nci_tx_wq#7 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc36_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_rx_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_tx_wq#8 FD: 25 BD: 2 +.+.: (wq_completion)nfc35_nci_cmd_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_tx_wq#6 FD: 1 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#9 FD: 25 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#9 FD: 1 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#8 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#8 FD: 25 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#8 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#9 FD: 25 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#9 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc30_nci_rx_wq#9 FD: 25 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#9 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#247 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#66 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1429 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1428 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1428 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#61 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#676 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#253 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#61 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#250 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#672 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#248 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1429 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1429 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#677 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#673 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#251 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#249 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#67 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#62 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#62 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1431 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#685 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#678 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1430 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1430 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#674 FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#32 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#32 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#255 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#252 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#250 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1431 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1431 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1432 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1432 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#686 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#679 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#675 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1434 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1433 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#687 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#680 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1435 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#676 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1434 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1434 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1435 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#688 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#681 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1435 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#256 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#251 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1437 ->&rq->__lock 
FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#678 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1436 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1436 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#257 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#254 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#252 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1438 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#68 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#63 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#63 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1437 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1437 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1438 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#690 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#683 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#679 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#684 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#680 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1440 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#692 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1439 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#685 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#681 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1441 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#693 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#686 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#682 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1440 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#258 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#255 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#253 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1442 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1441 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#694 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#687 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#683 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1443 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1442 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1443 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#695 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1443 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#688 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#684 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#254 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#696 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#689 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#69 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#685 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#64 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#64 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1445 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1444 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#260 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#257 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#697 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#255 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#690 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#686 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1446 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#1445 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1445 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1446 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1446 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#691 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#687 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#261 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#258 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#256 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#65 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#65 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1447 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1447 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#699 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#692 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#688 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#262 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#259 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#257 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1448 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#700 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#693 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#689 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#260 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#258 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1449 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1449 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#694 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#690 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#71 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#66 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#66 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#264 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1451 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#261 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#259 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1450 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1450 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#702 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#691 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#265 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#696 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#262 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#260 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#692 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1452 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1451 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#72 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#67 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#67 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1452 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1452 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#266 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#263 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#261 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1454 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1453 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1453 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#262 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#704 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#697 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#693 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1455 
FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1454 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1454 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#265 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#705 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#698 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#263 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#694 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1455 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1455 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#706 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#699 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#695 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#269 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#264 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1457 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1456 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1456 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#696 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#270 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#265 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1458 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1457 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#708 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#701 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#697 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1457 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#73 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#68 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#68 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#268 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#266 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1458 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1458 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#709 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#702 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#698 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1459 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1459 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1460 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1460 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#710 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#699 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1462 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1461 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1461 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#711 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#700 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1463 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1462 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1462 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1463 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1463 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#712 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#705 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#701 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1464 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1464 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#706 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#702 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1465 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#714 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#707 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc3_nci_tx_wq#703 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1465 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#273 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#269 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#267 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1466 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#708 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#704 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1468 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1467 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1467 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#709 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#705 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#274 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#270 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#268 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1468 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1470 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1469 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1469 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1470 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#717 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#710 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#706 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1472 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1471 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#718 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#711 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1473 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1472 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1472 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#712 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#708 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1474 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1473 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1473 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#271 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#269 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#69 FD: 25 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#18 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#18 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#17 FD: 25 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#16 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#14 FD: 25 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#17 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#17 FD: 25 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#12 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#10 FD: 25 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#10 FD: 25 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#12 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#9 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#1474 FD: 25 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1474 FD: 25 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#10 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#720 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#713 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#709 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#33 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#33 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#276 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#272 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1476 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#270 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1475 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#75 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#70 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#70 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1475 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#26 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#25 FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#19 FD: 25 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#20 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#20 FD: 25 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#18 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#18 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#18 FD: 25 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#22 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#22 FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#15 FD: 25 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#15 FD: 1 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#19 FD: 25 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#18 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#18 FD: 1 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#15 FD: 25 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#15 FD: 1 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#14 FD: 25 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#14 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#15 FD: 25 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#19 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#18 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#18 FD: 25 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#10 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc22_nci_cmd_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#13 FD: 1 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#11 FD: 25 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#10 FD: 25 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc28_nci_tx_wq#15 FD: 25 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#10 FD: 25 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#13 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#13 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#13 FD: 25 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#11 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#721 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#714 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#710 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#277 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#273 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#271 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1477 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1476 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#76 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#71 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#71 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#34 FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#34 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#34 FD: 25 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#27 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#26 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#26 FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#20 FD: 25 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#21 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#21 FD: 25 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#21 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#19 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#19 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#19 FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#19 FD: 25 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#19 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#19 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#23 FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#23 FD: 25 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#16 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#16 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#16 FD: 25 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#16 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#16 FD: 25 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#16 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#15 FD: 1 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#19 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc17_nci_tx_wq#19 FD: 25 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#16 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#16 FD: 25 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#16 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#14 FD: 25 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#10 FD: 25 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#10 FD: 25 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#25 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#24 FD: 25 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#24 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc12_nci_cmd_wq#21 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#21 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#21 FD: 25 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#20 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#20 FD: 25 BD: 2 +.+.: (wq_completion)nfc10_nci_cmd_wq#20 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#20 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#20 FD: 25 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#22 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#22 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#22 FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#21 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#21 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#21 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#28 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#27 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#27 FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#35 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#35 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#77 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#72 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#72 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1478 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1477 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1477 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#278 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#274 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#272 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#722 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#715 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#711 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1478 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1478 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#716 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#712 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1479 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1479 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#724 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#717 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1480 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#713 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1480 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1481 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1481 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#725 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#279 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#275 FD: 
1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#714 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#273 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1483 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1482 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1484 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1483 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1483 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#280 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#726 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#719 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#715 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#276 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1485 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#274 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1484 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1485 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#727 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1485 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#720 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#716 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#281 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#277 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#275 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1486 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1486 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1488 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1487 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1488 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#721 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#717 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1490 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#282 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1489 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1489 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#278 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#276 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#730 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1492 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#722 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#718 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1491 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1491 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#283 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#277 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1492 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1492 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#731 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#284 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#723 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#280 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#719 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#278 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1493 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1493 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1494 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#732 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1496 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#724 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1495 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#285 FD: 25 BD: 1 +.+.: 
(wq_completion)nfc4_nci_rx_wq#281 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1495 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#279 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1496 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1496 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#733 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#725 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#721 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1498 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1497 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1497 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1498 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1498 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1499 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#734 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#726 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#722 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1500 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1500 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1501 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1501 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#727 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#723 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1503 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1502 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1502 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#286 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#282 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#736 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#724 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#280 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1503 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1503 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#737 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#729 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#725 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1505 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1504 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1504 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1506 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1505 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1505 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#281 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1506 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#738 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#726 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#739 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#731 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1507 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1507 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#727 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1508 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1508 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#740 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#732 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#288 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#728 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#284 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#282 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc3_nci_rx_wq#733 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#729 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#734 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#730 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1510 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1510 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#289 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#283 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1512 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1511 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1511 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#743 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#735 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#731 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#286 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#284 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1513 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1512 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1512 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#736 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#732 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1513 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#745 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#737 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#733 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1513 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#291 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#287 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1515 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1514 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1514 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#746 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1515 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1515 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#738 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#734 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#288 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#286 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#747 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1516 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1516 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#739 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#735 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#748 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1517 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1517 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#740 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#736 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#78 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#73 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#73 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#289 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#287 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1519 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1518 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1518 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#749 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#741 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1520 FD: 1 BD: 1 +.+.: 
(wq_completion)nfc2_nci_rx_wq#1519 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1519 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#294 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#290 FD: 29 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#288 ->&rq->__lock ->stock_lock ->&obj_hash[i].lock ->key ->pcpu_lock ->percpu_counters_lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#74 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#74 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1521 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1520 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#750 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#742 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#738 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1522 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1521 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#743 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#739 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1521 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#295 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#291 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#289 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#744 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#740 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1523 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1522 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#296 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#292 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1522 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#290 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#80 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1524 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#75 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#75 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1523 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#753 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#745 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#754 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#746 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#742 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#36 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#36 FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#37 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#37 FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#38 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#38 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#38 FD: 25 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#21 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#21 FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#17 FD: 1 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#17 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#15 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#15 FD: 25 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#12 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#13 FD: 1 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#13 FD: 25 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#11 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#11 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc33_nci_rx_wq#9 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#9 FD: 25 BD: 2 +.+.: (wq_completion)nfc36_nci_cmd_wq#9 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc39_nci_rx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc39_nci_tx_wq#5 FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_rx_wq#9 FD: 25 BD: 1 +.+.: (wq_completion)nfc36_nci_tx_wq#9 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc42_nci_rx_wq#3 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1525 FD: 1 BD: 1 +.+.: (wq_completion)nfc49_nci_rx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1524 FD: 1 BD: 1 +.+.: (wq_completion)nfc49_nci_tx_wq#2 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#297 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#293 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#291 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#81 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#76 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#76 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#755 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#743 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#29 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#28 FD: 25 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#28 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#22 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#22 FD: 25 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#22 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#757 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#748 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#744 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#39 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#39 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#39 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc11_nci_cmd_wq#21 FD: 25 BD: 1 +.+.: (wq_completion)nfc11_nci_rx_wq#21 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc11_nci_tx_wq#21 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_rx_wq#22 FD: 1 BD: 1 +.+.: (wq_completion)nfc12_nci_tx_wq#22 FD: 25 BD: 2 +.+.: (wq_completion)nfc15_nci_cmd_wq#17 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_tx_wq#40 FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_rx_wq#40 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#40 FD: 25 BD: 1 +.+.: (wq_completion)nfc15_nci_rx_wq#17 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc15_nci_tx_wq#17 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1526 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1525 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1525 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc9_nci_cmd_wq#23 FD: 1 BD: 1 +.+.: (wq_completion)nfc9_nci_rx_wq#23 FD: 25 BD: 1 +.+.: (wq_completion)nfc9_nci_tx_wq#23 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc14_nci_cmd_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_rx_wq#22 FD: 1 BD: 1 +.+.: (wq_completion)nfc10_nci_tx_wq#22 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#41 FD: 25 BD: 1 +.+.: (wq_completion)nfc14_nci_rx_wq#18 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc14_nci_tx_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#41 FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#41 FD: 25 BD: 2 +.+.: (wq_completion)nfc20_nci_cmd_wq#11 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1527 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc20_nci_tx_wq#11 
FD: 25 BD: 2 +.+.: (wq_completion)nfc52_nci_cmd_wq ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc52_nci_rx_wq ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc52_nci_tx_wq ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc21_nci_cmd_wq#13 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc21_nci_rx_wq#12 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc21_nci_tx_wq#12 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#82 FD: 25 BD: 2 +.+.: (wq_completion)nfc19_nci_cmd_wq#16 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#77 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#77 FD: 1 BD: 1 +.+.: (wq_completion)nfc19_nci_rx_wq#16 FD: 25 BD: 1 +.+.: (wq_completion)nfc19_nci_tx_wq#16 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#42 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#42 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc23_nci_cmd_wq#12 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc51_nci_cmd_wq ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc23_nci_rx_wq#12 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc23_nci_tx_wq#12 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc24_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc8_nci_cmd_wq#23 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_rx_wq#23 FD: 1 BD: 1 +.+.: (wq_completion)nfc8_nci_tx_wq#23 FD: 1 BD: 1 +.+.: (wq_completion)nfc51_nci_rx_wq FD: 1 BD: 1 +.+.: (wq_completion)nfc51_nci_tx_wq FD: 1 BD: 2 +.+.: (wq_completion)nfc48_nci_cmd_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc24_nci_rx_wq#11 FD: 25 BD: 1 +.+.: (wq_completion)nfc24_nci_tx_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc48_nci_rx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc48_nci_tx_wq#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc47_nci_cmd_wq#2 FD: 25 BD: 1 +.+.: (wq_completion)nfc47_nci_rx_wq#2 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc28_nci_cmd_wq#16 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc47_nci_tx_wq#2 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc28_nci_rx_wq#16 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 2 +.+.: (wq_completion)nfc45_nci_cmd_wq#2 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc45_nci_rx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc45_nci_tx_wq#2 FD: 25 BD: 2 +.+.: (wq_completion)nfc43_nci_cmd_wq#2 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc29_nci_cmd_wq#11 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc43_nci_rx_wq#2 FD: 25 BD: 1 +.+.: (wq_completion)nfc29_nci_rx_wq#11 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc43_nci_tx_wq#2 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc29_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc30_nci_cmd_wq#11 FD: 25 BD: 2 +.+.: (wq_completion)nfc39_nci_cmd_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc39_nci_rx_wq#6 FD: 25 BD: 1 +.+.: (wq_completion)nfc30_nci_rx_wq#11 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc39_nci_tx_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc30_nci_tx_wq#11 FD: 25 BD: 2 +.+.: (wq_completion)nfc34_nci_cmd_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc34_nci_tx_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc37_nci_cmd_wq#8 FD: 25 BD: 2 +.+.: (wq_completion)nfc25_nci_cmd_wq#12 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc25_nci_rx_wq#12 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc37_nci_rx_wq#8 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc25_nci_tx_wq#12 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc37_nci_tx_wq#8 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc40_nci_cmd_wq#4 FD: 25 BD: 2 +.+.: (wq_completion)nfc38_nci_cmd_wq#6 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc40_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc40_nci_tx_wq#4 FD: 25 BD: 2 +.+.: (wq_completion)nfc35_nci_cmd_wq#7 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_rx_wq#7 FD: 1 BD: 1 +.+.: (wq_completion)nfc35_nci_tx_wq#7 FD: 25 BD: 1 +.+.: (wq_completion)nfc38_nci_rx_wq#6 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc32_nci_cmd_wq#14 FD: 25 BD: 1 +.+.: (wq_completion)nfc32_nci_rx_wq#14 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 1 +.+.: (wq_completion)nfc32_nci_tx_wq#14 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc33_nci_cmd_wq#10 FD: 1 BD: 2 +.+.: (wq_completion)nfc31_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc31_nci_tx_wq#11 FD: 25 BD: 2 +.+.: (wq_completion)nfc26_nci_cmd_wq#15 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc26_nci_rx_wq#15 FD: 25 BD: 1 +.+.: (wq_completion)nfc26_nci_tx_wq#15 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc27_nci_cmd_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc33_nci_rx_wq#10 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_rx_wq#11 FD: 1 BD: 1 +.+.: (wq_completion)nfc27_nci_tx_wq#11 FD: 1 BD: 2 +.+.: (wq_completion)nfc22_nci_cmd_wq#14 FD: 25 BD: 1 +.+.: (wq_completion)nfc33_nci_tx_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_rx_wq#14 FD: 1 BD: 1 +.+.: (wq_completion)nfc22_nci_tx_wq#14 FD: 25 BD: 2 +.+.: (wq_completion)nfc18_nci_cmd_wq#18 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_rx_wq#18 FD: 1 BD: 1 +.+.: (wq_completion)nfc18_nci_tx_wq#18 FD: 25 BD: 2 +.+.: (wq_completion)nfc17_nci_cmd_wq#21 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc36_nci_rx_wq#10 FD: 25 BD: 1 +.+.: (wq_completion)nfc17_nci_rx_wq#20 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc36_nci_tx_wq#10 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc17_nci_tx_wq#20 FD: 25 BD: 2 +.+.: (wq_completion)nfc16_nci_cmd_wq#16 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc16_nci_rx_wq#16 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 1 +.+.: (wq_completion)nfc16_nci_tx_wq#16 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc41_nci_cmd_wq#3 FD: 25 BD: 2 +.+.: (wq_completion)nfc13_nci_cmd_wq#26 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc13_nci_rx_wq#25 FD: 25 BD: 1 +.+.: (wq_completion)nfc13_nci_tx_wq#25 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc41_nci_rx_wq#3 FD: 25 BD: 1 +.+.: (wq_completion)nfc41_nci_tx_wq#3 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc44_nci_cmd_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc44_nci_rx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc44_nci_tx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc42_nci_rx_wq#4 FD: 1 BD: 1 +.+.: (wq_completion)nfc42_nci_tx_wq#4 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#298 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#294 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#292 FD: 1 BD: 2 +.+.: (wq_completion)nfc7_nci_cmd_wq#30 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_rx_wq#29 FD: 1 BD: 1 +.+.: (wq_completion)nfc7_nci_tx_wq#29 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#758 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#749 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#745 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1528 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1526 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1526 FD: 25 BD: 2 +.+.: (wq_completion)nfc49_nci_cmd_wq#3 ->&rq->__lock FD: 1 BD: 1 +.+.: 
(wq_completion)nfc49_nci_rx_wq#3 FD: 1 BD: 1 +.+.: (wq_completion)nfc49_nci_tx_wq#3 FD: 1 BD: 2 +.+.: (wq_completion)nfc50_nci_cmd_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc50_nci_rx_wq#2 FD: 1 BD: 1 +.+.: (wq_completion)nfc50_nci_tx_wq#2 FD: 1 BD: 2 +.+.: (wq_completion)nfc46_nci_cmd_wq#2 FD: 25 BD: 1 +.+.: (wq_completion)nfc46_nci_rx_wq#2 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1529 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1527 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1527 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#750 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1530 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#746 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1528 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1528 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#760 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#299 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#295 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#751 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#293 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#747 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1531 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1529 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#761 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#752 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#748 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1530 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1530 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#762 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#300 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#753 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#749 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1533 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#296 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1531 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#294 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1531 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#78 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#78 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#763 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#754 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1532 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#750 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#301 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#297 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#295 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1535 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1533 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1533 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#764 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#755 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#751 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1534 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1534 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#765 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#752 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1537 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1535 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1535 FD: 1 BD: 2 +.+.: 
(wq_completion)nfc4_nci_cmd_wq#302 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#298 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#296 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1538 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1536 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1539 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1537 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1537 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#766 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#757 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1540 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1538 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1538 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#758 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#754 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#303 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#84 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#299 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#297 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#79 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#79 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#768 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#759 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1541 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1539 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1539 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1542 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1540 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1541 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#85 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1541 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#80 FD: 1 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#43 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#80 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc6_nci_cmd_wq#44 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc6_nci_rx_wq#43 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc6_nci_tx_wq#43 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1544 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#300 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1542 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#298 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1542 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#769 FD: 1 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#87 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#81 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#760 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#756 FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#81 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1543 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1543 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#761 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#757 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#305 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#301 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#299 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#88 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#82 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#82 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1544 FD: 25 BD: 1 +.+.: 
(wq_completion)nfc2_nci_tx_wq#1544 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#762 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#758 FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#89 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_rx_wq#83 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#83 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#306 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#302 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#300 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1545 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#772 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#763 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#759 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1548 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1546 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1546 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#303 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#301 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#773 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#764 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#760 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#765 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#761 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#308 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#304 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1549 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#302 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc5_nci_cmd_wq#90 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1547 FD: 25 BD: 1 +.+.: (wq_completion)nfc5_nci_tx_wq#84 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1547 FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#775 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#762 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1548 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1548 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#309 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#776 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#305 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#767 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#303 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#763 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1549 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1549 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1552 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1550 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1550 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1551 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#768 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#764 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1554 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1552 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1552 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#778 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#769 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#765 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1554 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#306 FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#304 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1555 ->&rq->__lock FD: 25 BD: 1 +.+.: 
(wq_completion)nfc2_nci_tx_wq#1555 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1556 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1557 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1557 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#770 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#766 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1560 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1558 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1561 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1559 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1562 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1560 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1560 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1561 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1561 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1564 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1562 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1562 FD: 25 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#311 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#307 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#305 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1563 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1563 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#772 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#768 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1566 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1564 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#783 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1564 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#773 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1565 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1565 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#774 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#770 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1568 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1566 FD: 1 BD: 2 +.+.: (wq_completion)nfc4_nci_cmd_wq#312 FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#308 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#306 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#785 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#771 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1569 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1567 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1567 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1594 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1594 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1595 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1603 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1603 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1604 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#777 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#773 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1605 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1605 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#788 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#774 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#779 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#775 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#307 FD: 25 
BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1608 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1606 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1606 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1608 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1608 FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#790 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#780 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#776 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1610 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1610 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1611 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1612 FD: 1 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1615 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1613 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1613 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1614 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1614 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1615 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1615 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1616 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#791 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#781 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1619 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1617 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1617 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#782 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#778 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1618 FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1618 FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1621 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#783 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#779 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1619 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1619 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc4_nci_rx_wq#310 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc4_nci_tx_wq#308 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1620 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1620 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1621 ->&rq->__lock FD: 1 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#794 FD: 25 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#784 ->&rq->__lock FD: 25 BD: 2 +.+.: (wq_completion)nfc2_nci_cmd_wq#1624 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#780 FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_rx_wq#1622 ->&rq->__lock FD: 25 BD: 1 +.+.: (wq_completion)nfc2_nci_tx_wq#1622 ->&rq->__lock ->&cfs_rq->removed.lock ->&obj_hash[i].lock FD: 25 BD: 2 +.+.: (wq_completion)nfc3_nci_cmd_wq#795 ->&rq->__lock FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_rx_wq#785 FD: 1 BD: 1 +.+.: (wq_completion)nfc3_nci_tx_wq#781 all lock chains: irq_context: 0 (console_sem).lock irq_context: 0 &obj_hash[i].lock irq_context: 0 &obj_hash[i].lock pool_lock irq_context: 0 cgroup_mutex irq_context: 0 fixmap_lock irq_context: 0 cpu_hotplug_lock irq_context: 0 cpu_hotplug_lock jump_label_mutex irq_context: 0 console_mutex irq_context: 0 input_pool.lock irq_context: 0 base_crng.lock irq_context: 0 cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 crng_init_wait.lock irq_context: 0 early_pfn_lock irq_context: 0 devtree_lock irq_context: 0 resource_lock irq_context: 0 restart_handler_list.lock irq_context: 0 system_transition_mutex irq_context: 0 
pcpu_lock irq_context: 0 debug_hook_lock irq_context: 0 zonelist_update_seq irq_context: 0 zonelist_update_seq zonelist_update_seq.seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 &zone->lock irq_context: 0 &zone->lock &____s->seqcount irq_context: 0 &pcp->lock &zone->lock irq_context: 0 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &____s->seqcount irq_context: 0 pool_lock#2 irq_context: 0 pcpu_alloc_mutex irq_context: 0 pcpu_alloc_mutex pcpu_lock irq_context: 0 &n->list_lock irq_context: 0 &c->lock irq_context: 0 slab_mutex irq_context: 0 slab_mutex pool_lock#2 irq_context: 0 slab_mutex pcpu_alloc_mutex irq_context: 0 slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 trace_types_lock irq_context: 0 panic_notifier_list.lock irq_context: 0 die_chain.lock irq_context: 0 trace_event_sem irq_context: 0 batched_entropy_u32.lock irq_context: 0 batched_entropy_u32.lock crngs.lock irq_context: 0 batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 sysctl_lock irq_context: 0 &rq->__lock irq_context: 0 &rq->__lock rcu_read_lock &cfs_b->lock irq_context: 0 init_task.pi_lock irq_context: 0 init_task.pi_lock &rq->__lock irq_context: 0 init_task.vtime_seqcount irq_context: 0 slab_mutex &c->lock irq_context: 0 slab_mutex &pcp->lock &zone->lock irq_context: 0 slab_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 slab_mutex &____s->seqcount irq_context: 0 wq_pool_mutex irq_context: 0 wq_pool_mutex &pcp->lock &zone->lock irq_context: 0 wq_pool_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 wq_pool_mutex &____s->seqcount irq_context: 0 wq_pool_mutex &c->lock irq_context: 0 wq_pool_mutex pool_lock#2 irq_context: 0 &wq->mutex irq_context: 0 &wq->mutex &pool->lock irq_context: 0 wq_pool_mutex &wq->mutex irq_context: 0 cpu_hotplug_lock wq_pool_mutex irq_context: 0 cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pcp->lock &zone->lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 shrinker_mutex irq_context: 0 rcu_node_0 irq_context: 0 rcu_state.barrier_lock irq_context: 0 rcu_state.barrier_lock rcu_node_0 irq_context: 0 &rnp->exp_poll_lock irq_context: 0 &rnp->exp_poll_lock rcu_read_lock &pool->lock irq_context: 0 &rnp->exp_poll_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 trace_event_sem trace_event_ida.xa_lock irq_context: 0 trace_event_sem trace_event_ida.xa_lock &pcp->lock &zone->lock irq_context: 0 trace_event_sem trace_event_ida.xa_lock &____s->seqcount irq_context: 0 trace_event_sem trace_event_ida.xa_lock &c->lock irq_context: 0 trace_event_sem trace_event_ida.xa_lock pool_lock#2 irq_context: 0 trigger_cmd_mutex irq_context: 0 free_vmap_area_lock irq_context: 0 vmap_area_lock irq_context: 0 acpi_probe_mutex irq_context: 0 acpi_probe_mutex pool_lock#2 irq_context: 0 acpi_probe_mutex free_vmap_area_lock irq_context: 0 acpi_probe_mutex vmap_area_lock irq_context: 0 acpi_probe_mutex &pcp->lock &zone->lock irq_context: 0 acpi_probe_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 acpi_probe_mutex &____s->seqcount irq_context: 0 acpi_probe_mutex 
init_mm.page_table_lock irq_context: 0 acpi_probe_mutex resource_lock irq_context: 0 acpi_probe_mutex &c->lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 acpi_probe_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 acpi_probe_mutex (console_sem).lock irq_context: 0 acpi_probe_mutex irq_domain_mutex irq_context: 0 acpi_probe_mutex pcpu_alloc_mutex irq_context: 0 acpi_probe_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 acpi_probe_mutex irq_domain_mutex pool_lock#2 irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 acpi_probe_mutex &domain->mutex irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock &pcp->lock &zone->lock irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock &____s->seqcount irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock pool_lock#2 irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock &obj_hash[i].lock irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock &c->lock irq_context: 0 acpi_probe_mutex &domain->mutex sparse_irq_lock &obj_hash[i].lock pool_lock irq_context: 0 acpi_probe_mutex &domain->mutex pool_lock#2 irq_context: 0 acpi_probe_mutex &domain->mutex &irq_desc_lock_class irq_context: 0 acpi_probe_mutex &desc->request_mutex irq_context: 0 acpi_probe_mutex &desc->request_mutex &irq_desc_lock_class irq_context: 0 acpi_probe_mutex &irq_desc_lock_class irq_context: 0 acpi_probe_mutex cpu_pm_notifier.lock irq_context: 0 acpi_probe_mutex &obj_hash[i].lock irq_context: 0 acpi_probe_mutex purge_vmap_area_lock irq_context: 0 acpi_probe_mutex iort_msi_chip_lock irq_context: 0 acpi_probe_mutex &zone->lock irq_context: 0 acpi_probe_mutex &zone->lock &____s->seqcount irq_context: 0 acpi_probe_mutex its_lock irq_context: 0 acpi_probe_mutex resource_lock irq_context: 0 acpi_probe_mutex efi_mem_reserve_persistent_lock irq_context: 0 acpi_probe_mutex lpi_range_lock irq_context: 0 acpi_probe_mutex syscore_ops_lock irq_context: 0 acpi_probe_mutex its_lock &its->lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex cpuhp_state-down irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex cpuhp_state-up irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex resource_lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex pool_lock#2 irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex resource_lock irq_context: 0 timekeeper_lock irq_context: 0 timekeeper_lock tk_core.seq.seqcount irq_context: 0 timekeeper_lock tk_core.seq.seqcount &obj_hash[i].lock irq_context: 0 acpi_probe_mutex &desc->request_mutex &irq_desc_lock_class irq_controller_lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex (console_sem).lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex clockevents_lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex clockevents_lock tk_core.seq.seqcount irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex clockevents_lock tick_broadcast_lock irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex clockevents_lock 
jiffies_seq.seqcount irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex &irq_desc_lock_class irq_context: 0 acpi_probe_mutex cpu_hotplug_lock cpuhp_state_mutex &irq_desc_lock_class irq_controller_lock irq_context: 0 acpi_probe_mutex clocksource_mutex irq_context: 0 clockevents_lock irq_context: 0 tk_core.seq.seqcount irq_context: 0 &base->lock irq_context: 0 &base->lock &obj_hash[i].lock irq_context: 0 rcu_read_lock &pool->lock irq_context: 0 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 batched_entropy_u64.lock irq_context: 0 batched_entropy_u64.lock crngs.lock irq_context: 0 pmus_lock irq_context: 0 pmus_lock pcpu_alloc_mutex irq_context: 0 pmus_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 pmus_lock pool_lock#2 irq_context: 0 pmus_lock &obj_hash[i].lock irq_context: 0 &swhash->hlist_mutex irq_context: 0 pmus_lock &cpuctx_mutex irq_context: 0 pmus_lock &obj_hash[i].lock pool_lock irq_context: 0 tty_ldiscs_lock irq_context: 0 console_lock irq_context: 0 console_lock pool_lock#2 irq_context: 0 console_lock &obj_hash[i].lock irq_context: 0 console_lock &pcp->lock &zone->lock irq_context: 0 console_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 console_lock &____s->seqcount irq_context: 0 console_lock &c->lock irq_context: 0 console_lock kbd_event_lock irq_context: 0 console_lock kbd_event_lock led_lock irq_context: 0 console_lock (console_sem).lock irq_context: 0 console_lock console_owner_lock irq_context: 0 init_task.alloc_lock irq_context: 0 acpi_ioremap_lock irq_context: 0 acpi_ioremap_lock pool_lock#2 irq_context: 0 semaphore->lock irq_context: 0 *(&acpi_gbl_reference_count_lock) irq_context: 0 hrtimer_bases.lock irq_context: 0 hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 percpu_counters_lock irq_context: 0 tomoyo_policy_lock irq_context: 0 tomoyo_policy_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem irq_context: 0 pernet_ops_rwsem stack_depot_init_mutex irq_context: 0 pernet_ops_rwsem crngs.lock irq_context: 0 pernet_ops_rwsem net_rwsem irq_context: 0 pernet_ops_rwsem proc_inum_ida.xa_lock irq_context: 0 rtnl_mutex irq_context: 0 rtnl_mutex &c->lock irq_context: 0 rtnl_mutex &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &____s->seqcount irq_context: 0 rtnl_mutex pool_lock#2 irq_context: 0 lock irq_context: 0 lock kernfs_idr_lock irq_context: 0 lock kernfs_idr_lock pool_lock#2 irq_context: 0 &root->kernfs_rwsem irq_context: 0 file_systems_lock irq_context: 0 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 dq_list_lock irq_context: 0 sb_lock irq_context: 0 &type->s_umount_key/1 irq_context: 0 &type->s_umount_key/1 pool_lock#2 irq_context: 0 &type->s_umount_key/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key/1 shrinker_mutex irq_context: 0 &type->s_umount_key/1 shrinker_mutex pool_lock#2 irq_context: 0 &type->s_umount_key/1 list_lrus_mutex irq_context: 0 &type->s_umount_key/1 sb_lock irq_context: 0 &type->s_umount_key/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key/1 &c->lock irq_context: 0 &type->s_umount_key/1 &____s->seqcount irq_context: 0 &type->s_umount_key/1 &obj_hash[i].lock irq_context: 0 &type->s_umount_key/1 percpu_counters_lock irq_context: 0 &type->s_umount_key/1 crngs.lock irq_context: 0 &type->s_umount_key/1 &sbinfo->stat_lock irq_context: 0 &type->s_umount_key/1 &sb->s_type->i_lock_key 
irq_context: 0 &type->s_umount_key/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key/1 batched_entropy_u32.lock irq_context: 0 &type->s_umount_key/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key/1 &sb->s_type->i_lock_key &dentry->d_lock irq_context: 0 &type->s_umount_key/1 &dentry->d_lock irq_context: 0 mnt_id_ida.xa_lock irq_context: 0 &dentry->d_lock irq_context: 0 mount_lock irq_context: 0 mount_lock mount_lock.seqcount irq_context: 0 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#2/1 irq_context: 0 &type->s_umount_key#2/1 pool_lock#2 irq_context: 0 &type->s_umount_key#2/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#2/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#2/1 shrinker_mutex irq_context: 0 &type->s_umount_key#2/1 &c->lock irq_context: 0 &type->s_umount_key#2/1 &____s->seqcount irq_context: 0 &type->s_umount_key#2/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#2/1 sb_lock irq_context: 0 &type->s_umount_key#2/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#2/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#2/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#2/1 &sb->s_type->i_lock_key#2 irq_context: 0 &type->s_umount_key#2/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#2/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#2/1 &sb->s_type->i_lock_key#2 &dentry->d_lock irq_context: 0 &type->s_umount_key#2/1 &dentry->d_lock irq_context: 0 ucounts_lock irq_context: 0 proc_inum_ida.xa_lock irq_context: 0 init_fs.lock irq_context: 0 init_fs.lock init_fs.seq.seqcount irq_context: hardirq jiffies_lock irq_context: hardirq jiffies_lock jiffies_seq.seqcount irq_context: hardirq log_wait.lock irq_context: 0 &type->s_umount_key#3/1 irq_context: 0 &type->s_umount_key#3/1 pool_lock#2 irq_context: 0 &type->s_umount_key#3/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#3/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#3/1 shrinker_mutex irq_context: 0 &type->s_umount_key#3/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#3/1 sb_lock irq_context: 0 &type->s_umount_key#3/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#3/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#3/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#3/1 &____s->seqcount irq_context: 0 &type->s_umount_key#3/1 &c->lock irq_context: 0 &type->s_umount_key#3/1 &sb->s_type->i_lock_key#3 irq_context: 0 &type->s_umount_key#3/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#3/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#3/1 &sb->s_type->i_lock_key#3 &dentry->d_lock irq_context: 0 &type->s_umount_key#3/1 &dentry->d_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex cpuhp_state-down irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex cpuhp_state-up irq_context: 0 proc_subdir_lock irq_context: 0 proc_subdir_lock irq_context: 0 pernet_ops_rwsem pool_lock#2 irq_context: 0 pernet_ops_rwsem proc_subdir_lock irq_context: 0 pernet_ops_rwsem proc_subdir_lock irq_context: 0 &type->s_umount_key#4/1 irq_context: 0 &type->s_umount_key#4/1 pool_lock#2 irq_context: 0 &type->s_umount_key#4/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#4/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#4/1 shrinker_mutex irq_context: 0 &type->s_umount_key#4/1 
list_lrus_mutex irq_context: 0 &type->s_umount_key#4/1 sb_lock irq_context: 0 &type->s_umount_key#4/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#4/1 &sb->s_type->i_lock_key#4 irq_context: 0 &type->s_umount_key#4/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#4/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#4/1 &sb->s_type->i_lock_key#4 &dentry->d_lock irq_context: 0 &type->s_umount_key#4/1 &dentry->d_lock irq_context: 0 cgroup_mutex pcpu_alloc_mutex irq_context: 0 cgroup_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 cgroup_mutex &c->lock irq_context: 0 cgroup_mutex &____s->seqcount irq_context: 0 cgroup_mutex pool_lock#2 irq_context: 0 cgroup_mutex lock irq_context: 0 cgroup_mutex lock kernfs_idr_lock irq_context: 0 cgroup_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 cgroup_mutex &root->kernfs_rwsem irq_context: 0 cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 cgroup_mutex &obj_hash[i].lock irq_context: 0 cgroup_mutex cgroup_file_kn_lock irq_context: 0 cgroup_mutex &obj_hash[i].lock pool_lock irq_context: 0 cgroup_mutex css_set_lock irq_context: 0 lock cgroup_idr_lock irq_context: 0 lock cgroup_idr_lock pool_lock#2 irq_context: 0 cpuset_mutex irq_context: 0 cpuset_mutex callback_lock irq_context: 0 cgroup_mutex blkcg_pol_mutex irq_context: 0 cgroup_mutex blkcg_pol_mutex pcpu_alloc_mutex irq_context: 0 cgroup_mutex blkcg_pol_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 cgroup_mutex lock cgroup_idr_lock irq_context: 0 cgroup_mutex lock cgroup_idr_lock pool_lock#2 irq_context: 0 cgroup_mutex percpu_counters_lock irq_context: 0 cgroup_mutex shrinker_mutex irq_context: 0 cgroup_mutex shrinker_mutex pool_lock#2 irq_context: 0 cgroup_mutex &base->lock irq_context: 0 cgroup_mutex &base->lock &obj_hash[i].lock irq_context: 0 cgroup_mutex devcgroup_mutex irq_context: 0 cgroup_mutex cpu_hotplug_lock irq_context: 0 cgroup_mutex cpu_hotplug_lock freezer_mutex irq_context: 0 rcu_state.exp_mutex rcu_node_0 irq_context: 0 rcu_state.exp_mutex rcu_state.exp_wake_mutex irq_context: 0 rcu_state.exp_mutex rcu_state.exp_wake_mutex rcu_node_0 irq_context: 0 rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_lock irq_context: 0 rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[0] irq_context: 0 rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[1] irq_context: 0 init_sighand.siglock irq_context: 0 init_mm.page_table_lock irq_context: 0 init_files.file_lock irq_context: 0 rcu_read_lock init_sighand.siglock irq_context: 0 lock pidmap_lock irq_context: 0 lock pidmap_lock pool_lock#2 irq_context: 0 pidmap_lock irq_context: 0 cgroup_threadgroup_rwsem irq_context: 0 cgroup_threadgroup_rwsem css_set_lock irq_context: 0 cgroup_threadgroup_rwsem &p->pi_lock irq_context: 0 cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem tk_core.seq.seqcount irq_context: 0 cgroup_threadgroup_rwsem tasklist_lock irq_context: 0 cgroup_threadgroup_rwsem tasklist_lock init_sighand.siglock irq_context: 0 cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock irq_context: 0 cgroup_threadgroup_rwsem css_set_lock &p->pi_lock irq_context: 0 cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock irq_context: 0 &p->pi_lock irq_context: 0 &p->pi_lock &rq->__lock irq_context: 0 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 &p->pi_lock &rq->__lock &base->lock irq_context: 0 &p->pi_lock &rq->__lock &base->lock 
&obj_hash[i].lock irq_context: 0 rcu_read_lock &p->pi_lock irq_context: 0 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 (kthreadd_done).wait.lock irq_context: 0 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sighand->siglock irq_context: 0 &p->alloc_lock irq_context: 0 &p->alloc_lock &____s->seqcount#2 irq_context: 0 fs_reclaim irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kthread_create_lock irq_context: 0 &x->wait irq_context: 0 rcu_read_lock &sighand->siglock irq_context: 0 cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem tasklist_lock &sighand->siglock irq_context: 0 &x->wait &p->pi_lock irq_context: 0 &x->wait &p->pi_lock &rq->__lock irq_context: 0 &x->wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (worker)->lock irq_context: 0 wq_pool_mutex fs_reclaim irq_context: 0 wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 wq_pool_mutex kthread_create_lock irq_context: 0 wq_pool_mutex &p->pi_lock irq_context: 0 wq_pool_mutex &p->pi_lock &rq->__lock irq_context: 0 wq_pool_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 wq_pool_mutex &x->wait irq_context: 0 wq_pool_mutex &rq->__lock irq_context: 0 wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 wq_pool_mutex &obj_hash[i].lock irq_context: 0 wq_pool_attach_mutex irq_context: 0 wq_mayday_lock irq_context: 0 &xa->xa_lock irq_context: 0 &pool->lock irq_context: 0 &pool->lock &p->pi_lock irq_context: 0 &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (&pool->mayday_timer) irq_context: 0 &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rnp->exp_poll_wq) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rnp->exp_poll_wq) &rnp->exp_poll_lock irq_context: 0 (null) irq_context: 0 (null) tk_core.seq.seqcount irq_context: 0 (&wq_watchdog_timer) irq_context: 0 (wq_completion)events_unbound irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) allocation_wait.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq allocation_wait.lock irq_context: hardirq allocation_wait.lock &p->pi_lock irq_context: hardirq allocation_wait.lock &p->pi_lock &rq->__lock irq_context: hardirq allocation_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 batched_entropy_u8.lock irq_context: 0 batched_entropy_u8.lock crngs.lock irq_context: 0 kfence_freelist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &base->lock 
irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &base->lock &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex irq_context: 0 rcu_tasks.cbs_gbl_lock irq_context: hardirq rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks__percpu.cbs_pcpu_lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex rcu_node_0 irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[2] irq_context: 0 rcu_tasks.tasks_gp_mutex &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex &base->lock irq_context: 0 rcu_tasks.tasks_gp_mutex &base->lock &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock &obj_hash[i].lock pool_lock irq_context: 0 rcu_tasks.tasks_gp_mutex &ACCESS_PRIVATE(sdp, lock) irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock rcu_read_lock &pool->lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rcu_tasks.tasks_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks.tasks_gp_mutex &x->wait#2 irq_context: 0 rcu_tasks.tasks_gp_mutex &rq->__lock irq_context: 0 rcu_tasks.tasks_gp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks_trace.tasks_gp_mutex irq_context: 0 rcu_tasks_trace.cbs_gbl_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex tasks_rcu_exit_srcu_srcu_usage.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex tasks_rcu_exit_srcu_srcu_usage.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) tasks_rcu_exit_srcu_srcu_usage.lock irq_context: 0 (wq_completion)rcu_gp 
(work_completion)(&sdp->work) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &ACCESS_PRIVATE(sdp, lock) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &x->wait#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &x->wait#2 &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &x->wait#2 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &x->wait#2 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &x->wait#3 irq_context: 0 rcu_tasks.tasks_gp_mutex kernel/rcu/tasks.h:152 irq_context: softirq (&timer.timer) irq_context: softirq (&timer.timer) &p->pi_lock irq_context: softirq (&timer.timer) &p->pi_lock &rq->__lock irq_context: softirq (&timer.timer) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&kfence_timer)->timer irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks.tasks_gp_mutex (&timer.timer) irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[3] irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_tasks__percpu.cbs_pcpu_lock irq_context: 0 rcu_tasks.tasks_gp_mutex &x->wait#3 irq_context: 0 rcu_tasks.tasks_gp_mutex &x->wait#3 &p->pi_lock irq_context: 0 rcu_tasks.tasks_gp_mutex &x->wait#3 &p->pi_lock &rq->__lock irq_context: 0 rcu_tasks.tasks_gp_mutex &x->wait#3 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock rcu_tasks__percpu.cbs_pcpu_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock rcu_tasks__percpu.cbs_pcpu_lock &base->lock irq_context: 0 rcu_read_lock rcu_tasks__percpu.cbs_pcpu_lock &base->lock &obj_hash[i].lock irq_context: 0 rcu_tasks_trace__percpu.cbs_pcpu_lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex cpu_hotplug_lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex cpu_hotplug_lock rcu_tasks_trace__percpu.cbs_pcpu_lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex cpu_hotplug_lock &ACCESS_PRIVATE(rtpcp, lock) irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex rcu_node_0 irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[0] irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_tasks_trace__percpu.cbs_pcpu_lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &x->wait#3 irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &x->wait#3 &p->pi_lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &x->wait#3 &p->pi_lock &rq->__lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &x->wait#3 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock rcu_tasks_trace__percpu.cbs_pcpu_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock 
rcu_tasks_trace__percpu.cbs_pcpu_lock &base->lock irq_context: 0 rcu_read_lock rcu_tasks_trace__percpu.cbs_pcpu_lock &base->lock &obj_hash[i].lock irq_context: 0 (memory_chain).rwsem irq_context: 0 cpu_hotplug_lock smpboot_threads_lock irq_context: 0 cpu_hotplug_lock smpboot_threads_lock fs_reclaim irq_context: 0 cpu_hotplug_lock smpboot_threads_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_hotplug_lock smpboot_threads_lock pool_lock#2 irq_context: 0 cpu_hotplug_lock smpboot_threads_lock kthread_create_lock irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &p->pi_lock irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &x->wait irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &rq->__lock irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock smpboot_threads_lock &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock smpboot_threads_lock hrtimer_bases.lock irq_context: 0 cpu_hotplug_lock smpboot_threads_lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 cpu_hotplug_lock smpboot_threads_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 &rcu_state.gp_wq irq_context: 0 &stop_pi_lock irq_context: 0 &stop_pi_lock &rq->__lock irq_context: 0 &stop_pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &stopper->lock irq_context: 0 (module_notify_list).rwsem irq_context: 0 ddebug_lock irq_context: 0 iort_msi_chip_lock irq_context: 0 irq_domain_mutex irq_context: 0 irq_domain_mutex fs_reclaim irq_context: 0 irq_domain_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 irq_domain_mutex pool_lock#2 irq_context: 0 cci_probing irq_context: 0 cci_probing devtree_lock irq_context: 0 resource_lock irq_context: 0 fixmap_lock fs_reclaim irq_context: 0 fixmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 fixmap_lock &____s->seqcount irq_context: 0 fixmap_lock &c->lock irq_context: 0 fixmap_lock pool_lock#2 irq_context: 0 rcu_read_lock ptlock_ptr(ptdesc) irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex clockevents_lock irq_context: 0 watchdog_mutex irq_context: 0 watchdog_mutex cpu_hotplug_lock irq_context: 0 watchdog_mutex cpu_hotplug_lock &obj_hash[i].lock irq_context: 0 watchdog_mutex cpu_hotplug_lock rcu_read_lock &pool->lock irq_context: 0 watchdog_mutex cpu_hotplug_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 watchdog_mutex cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 watchdog_mutex cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 watchdog_mutex cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 watchdog_mutex cpu_hotplug_lock &x->wait#4 irq_context: 0 watchdog_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 watchdog_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) &x->wait#5 irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) hrtimer_bases.lock irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) 
hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) &x->wait#4 irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) &x->wait#4 &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) &x->wait#4 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&sscs.work) &x->wait#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &newf->file_lock irq_context: 0 init_fs.lock &dentry->d_lock irq_context: 0 &p->vtime.seqcount irq_context: 0 cpu_hotplug_lock mem_hotplug_lock irq_context: 0 cpu_hotplug_lock mem_hotplug_lock mem_hotplug_lock.rss.gp_wait.lock irq_context: 0 cpu_hotplug_lock mem_hotplug_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 cpu_hotplug_lock mem_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex irq_context: 0 cpu_hotplug_lock mem_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex rcu_node_0 irq_context: 0 cpu_hotplug_lock mem_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_lock irq_context: 0 cpu_hotplug_lock mem_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[1] irq_context: 0 cpu_hotplug_lock mem_hotplug_lock.waiters.lock irq_context: 0 cpu_hotplug_lock mem_hotplug_lock.rss.gp_wait.lock irq_context: 0 cpu_hotplug_lock mem_hotplug_lock.rss.gp_wait.lock &obj_hash[i].lock irq_context: 0 cpu_add_remove_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock cpu_hotplug_lock.rss.gp_wait.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex rcu_node_0 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[2] irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock.waiters.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock.rss.gp_wait.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock.rss.gp_wait.lock &obj_hash[i].lock irq_context: 0 cpu_add_remove_lock cpuset_hotplug_work irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock fs_reclaim irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock pool_lock#2 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock kthread_create_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &p->pi_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &p->pi_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &x->wait irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &obj_hash[i].lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock &obj_hash[i].lock pool_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock hrtimer_bases.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock smpboot_threads_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: softirq &rcu_state.gp_wq &p->pi_lock irq_context: softirq &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: softirq &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback &obj_hash[i].lock irq_context: softirq rcu_callback pool_lock#2 irq_context: 0 rcu_tasks_trace.tasks_gp_mutex cpu_hotplug_lock cpu_hotplug_lock.waiters.lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &swhash->hlist_mutex irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock pmus_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock pmus_lock &cpuctx_mutex irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock pcp_batch_high_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &xa->xa_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock fs_reclaim irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock pool_lock#2 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &c->lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &____s->seqcount irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock kthread_create_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &p->pi_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &p->pi_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &x->wait irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &obj_hash[i].lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock hrtimer_bases.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: softirq rcu_callback mem_hotplug_lock.rss.gp_wait.lock irq_context: softirq rcu_callback mem_hotplug_lock.rss.gp_wait.lock &obj_hash[i].lock irq_context: softirq rcu_callback cpu_hotplug_lock.rss.gp_wait.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock wq_pool_attach_mutex irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &pool->lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &pool->lock &p->pi_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock pcpu_alloc_mutex irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock rcu_node_0 irq_context: 0 cpu_add_remove_lock 
cpu_hotplug_lock sparse_irq_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock (cpu_running).wait.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &obj_hash[i].lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &base->lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &base->lock &obj_hash[i].lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 its_lock irq_context: 0 its_lock &its->lock irq_context: 0 clockevents_lock tick_broadcast_lock irq_context: 0 clockevents_lock jiffies_seq.seqcount irq_context: 0 clockevents_lock tk_core.seq.seqcount irq_context: 0 &irq_desc_lock_class irq_context: 0 &irq_desc_lock_class irq_controller_lock irq_context: 0 (cpu_running).wait.lock irq_context: 0 (cpu_running).wait.lock &p->pi_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock (&timer.timer) irq_context: 0 &x->wait#6 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &x->wait#6 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &p->pi_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &p->pi_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock sparse_irq_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &x->wait#6 irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up irq_context: 0 cpu_hotplug_lock cpuhp_state-up smpboot_threads_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up smpboot_threads_lock &p->pi_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up smpboot_threads_lock &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up smpboot_threads_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up smpboot_threads_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock cpuhp_state-up smpboot_threads_lock &rq->__lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up sparse_irq_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up sparse_irq_lock &irq_desc_lock_class irq_context: 0 cpu_hotplug_lock cpuhp_state-up &swhash->hlist_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state-up pmus_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up pmus_lock &cpuctx_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state-up &x->wait#5 irq_context: 0 cpu_hotplug_lock cpuhp_state-up &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &obj_hash[i].lock pool_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up hrtimer_bases.lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state-up hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up wq_pool_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state-up wq_pool_mutex wq_pool_attach_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state-up wq_pool_mutex wq_pool_attach_mutex &p->pi_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up wq_pool_mutex wq_pool_attach_mutex &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up wq_pool_mutex wq_pool_attach_mutex 
&x->wait#7 irq_context: 0 cpu_hotplug_lock cpuhp_state-up wq_pool_mutex wq_pool_attach_mutex &pool->lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_node_0 irq_context: 0 cpu_hotplug_lock cpuhp_state-up resource_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &pcp->lock &zone->lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state-up &____s->seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state-up &c->lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up pool_lock#2 irq_context: 0 cpu_hotplug_lock cpuhp_state-up resource_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_read_lock &pool->lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock cpuhp_state-up &rq->__lock irq_context: hardirq &rq->__lock &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &rq->__lock &rt_b->rt_runtime_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &rq->__lock &rt_b->rt_runtime_lock &rt_rq->rt_runtime_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &rq->__lock rcu_read_lock &cfs_b->lock irq_context: 0 &x->wait#6 &p->pi_lock irq_context: 0 &x->wait#6 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#6 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock.waiters.lock &p->pi_lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock.waiters.lock &p->pi_lock &rq->__lock irq_context: 0 cpu_add_remove_lock cpu_hotplug_lock.waiters.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock stop_cpus_mutex irq_context: 0 cpu_hotplug_lock stop_cpus_mutex &stopper->lock irq_context: 0 cpu_hotplug_lock stop_cpus_mutex &stop_pi_lock irq_context: 0 cpu_hotplug_lock stop_cpus_mutex &stop_pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock stop_cpus_mutex &stop_pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock stop_cpus_mutex &rq->__lock irq_context: 0 &x->wait#8 irq_context: 0 rcu_tasks_trace.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[3] irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &obj_hash[i].lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &base->lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &base->lock &obj_hash[i].lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &rq->__lock irq_context: 0 rcu_tasks_trace.tasks_gp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock stop_cpus_mutex &x->wait#8 irq_context: 0 &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events rdist_memreserve_cpuhp_cleanup_work irq_context: 0 (wq_completion)events rdist_memreserve_cpuhp_cleanup_work cpu_hotplug_lock irq_context: 0 (wq_completion)events rdist_memreserve_cpuhp_cleanup_work cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 sched_domains_mutex irq_context: 0 sched_domains_mutex fs_reclaim irq_context: 0 sched_domains_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sched_domains_mutex 
pool_lock#2 irq_context: 0 sched_domains_mutex &obj_hash[i].lock irq_context: 0 sched_domains_mutex pcpu_alloc_mutex irq_context: 0 sched_domains_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sched_domains_mutex &c->lock irq_context: 0 sched_domains_mutex &____s->seqcount irq_context: 0 sched_domains_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 sched_domains_mutex rcu_read_lock pool_lock#2 irq_context: 0 sched_domains_mutex rcu_read_lock &rq->__lock irq_context: 0 sched_domains_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sched_domains_mutex rcu_read_lock &rq->__lock &cp->lock irq_context: 0 sched_domains_mutex rcu_read_lock &rq->__lock &rt_b->rt_runtime_lock irq_context: 0 sched_domains_mutex rcu_read_lock &rq->__lock &rt_b->rt_runtime_lock &rt_rq->rt_runtime_lock irq_context: 0 sched_domains_mutex rcu_read_lock &rq->__lock rcu_read_lock &cfs_b->lock irq_context: 0 sched_domains_mutex pcpu_lock irq_context: 0 slab_mutex fs_reclaim irq_context: 0 slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#5/1 irq_context: 0 &type->s_umount_key#5/1 fs_reclaim irq_context: 0 &type->s_umount_key#5/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#5/1 pool_lock#2 irq_context: 0 &type->s_umount_key#5/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#5/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#5/1 shrinker_mutex irq_context: 0 &type->s_umount_key#5/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#5/1 sb_lock irq_context: 0 &type->s_umount_key#5/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#5/1 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#5/1 percpu_counters_lock irq_context: 0 &type->s_umount_key#5/1 crngs.lock irq_context: 0 &type->s_umount_key#5/1 &sbinfo->stat_lock irq_context: 0 &type->s_umount_key#5/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#5/1 &sb->s_type->i_lock_key#5 irq_context: 0 &type->s_umount_key#5/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#5/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#5/1 batched_entropy_u32.lock irq_context: 0 &type->s_umount_key#5/1 &sb->s_type->i_lock_key#5 &dentry->d_lock irq_context: 0 &type->s_umount_key#5/1 &dentry->d_lock irq_context: 0 (setup_done).wait.lock irq_context: 0 namespace_sem irq_context: 0 namespace_sem fs_reclaim irq_context: 0 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 namespace_sem &pcp->lock &zone->lock irq_context: 0 namespace_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 namespace_sem &____s->seqcount irq_context: 0 namespace_sem &c->lock irq_context: 0 namespace_sem pool_lock#2 irq_context: 0 namespace_sem mnt_id_ida.xa_lock irq_context: 0 namespace_sem pcpu_alloc_mutex irq_context: 0 namespace_sem pcpu_alloc_mutex pcpu_lock irq_context: 0 namespace_sem &dentry->d_lock irq_context: 0 namespace_sem mount_lock irq_context: 0 namespace_sem mount_lock mount_lock.seqcount irq_context: 0 &p->alloc_lock init_fs.lock irq_context: 0 rcu_read_lock &____s->seqcount#3 irq_context: 0 file_systems_lock irq_context: 0 &type->s_umount_key#6 irq_context: 0 &type->s_umount_key#6 fs_reclaim irq_context: 0 &type->s_umount_key#6 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#6 pool_lock#2 
irq_context: 0 &type->s_umount_key#6 &dentry->d_lock irq_context: 0 &type->s_umount_key#6 &____s->seqcount irq_context: 0 &type->s_umount_key#6 &c->lock irq_context: 0 &type->s_umount_key#6 &lru->node[i].lock irq_context: 0 &type->s_umount_key#6 &sbinfo->stat_lock irq_context: 0 &type->s_umount_key#6 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#6 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key irq_context: 0 &sb->s_type->i_mutex_key namespace_sem irq_context: 0 &sb->s_type->i_mutex_key namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key namespace_sem fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key namespace_sem pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key namespace_sem rename_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem rename_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock mount_lock.seqcount &new_ns->poll irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock mount_lock.seqcount &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock mount_lock.seqcount rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock mount_lock.seqcount &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem mount_lock mount_lock.seqcount pool_lock#2 irq_context: 0 rcu_read_lock &sb->s_type->i_lock_key#2 irq_context: 0 rcu_read_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_lock_key#5 irq_context: 0 &fs->lock irq_context: 0 &fs->lock &____s->seqcount#3 irq_context: 0 (setup_done).wait.lock &p->pi_lock irq_context: 0 req_lock irq_context: 0 of_mutex irq_context: 0 of_mutex fs_reclaim irq_context: 0 of_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 of_mutex pool_lock#2 irq_context: 0 of_mutex lock irq_context: 0 of_mutex lock kernfs_idr_lock irq_context: 0 of_mutex &root->kernfs_rwsem irq_context: 0 of_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &x->wait#9 irq_context: 0 &k->list_lock irq_context: 0 bus_type_sem irq_context: 0 &root->kernfs_rwsem irq_context: 0 &dev->power.lock irq_context: 0 dpm_list_mtx irq_context: 0 uevent_sock_mutex irq_context: 0 running_helpers_waitq.lock irq_context: 0 sysfs_symlink_target_lock irq_context: 0 &k->k_lock irq_context: 0 &dev->mutex &k->list_lock irq_context: 0 &dev->mutex &k->k_lock irq_context: 0 &dev->mutex &dev->power.lock irq_context: 0 subsys mutex irq_context: 0 memory_blocks.xa_lock irq_context: 0 memory_blocks.xa_lock pool_lock#2 irq_context: 0 lock kernfs_idr_lock &c->lock irq_context: 0 lock kernfs_idr_lock &____s->seqcount irq_context: 0 rcu_tasks_trace.tasks_gp_mutex (&timer.timer) irq_context: 0 rcu_tasks_trace.tasks_gp_mutex (console_sem).lock irq_context: softirq &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 subsys mutex#2 irq_context: 0 subsys mutex#3 irq_context: 0 dev_pm_qos_mtx irq_context: 0 dev_pm_qos_mtx fs_reclaim irq_context: 0 dev_pm_qos_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 dev_pm_qos_mtx pool_lock#2 
irq_context: 0 dev_pm_qos_mtx &dev->power.lock irq_context: 0 dev_pm_qos_mtx pm_qos_lock irq_context: 0 dev_pm_qos_sysfs_mtx irq_context: 0 dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 dev_pm_qos_sysfs_mtx fs_reclaim irq_context: 0 dev_pm_qos_sysfs_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 dev_pm_qos_sysfs_mtx pool_lock#2 irq_context: 0 dev_pm_qos_sysfs_mtx lock irq_context: 0 dev_pm_qos_sysfs_mtx lock kernfs_idr_lock irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 dev_pm_qos_mtx &c->lock irq_context: 0 dev_pm_qos_mtx &pcp->lock &zone->lock irq_context: 0 dev_pm_qos_mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 dev_pm_qos_mtx &____s->seqcount irq_context: 0 register_lock irq_context: 0 register_lock proc_subdir_lock irq_context: 0 register_lock fs_reclaim irq_context: 0 register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 register_lock pool_lock#2 irq_context: 0 register_lock proc_inum_ida.xa_lock irq_context: 0 register_lock proc_subdir_lock irq_context: 0 register_lock &c->lock irq_context: 0 register_lock &____s->seqcount irq_context: 0 register_lock proc_inum_ida.xa_lock pool_lock#2 irq_context: softirq (&timer.timer) &p->pi_lock &cfs_rq->removed.lock irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex &rcu_state.expedited_wq irq_context: hardirq &rcu_state.expedited_wq irq_context: hardirq &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &rq->__lock irq_context: 0 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &wq->mutex &pool->lock &p->pi_lock irq_context: 0 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 batched_entropy_u64.lock crngs.lock base_crng.lock irq_context: 0 &x->wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cpu_pm_notifier.lock irq_context: 0 (cpufreq_policy_notifier_list).rwsem irq_context: 0 (pm_chain_head).rwsem irq_context: 0 cpufreq_governor_mutex irq_context: 0 rcu_state.exp_mutex rcu_state.exp_wake_mutex &rnp->exp_wq[2] irq_context: 0 rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rcu_state.exp_mutex 
rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 rcu_state.exp_mutex &rq->__lock irq_context: 0 rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[3] &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[3] &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[3] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[0] irq_context: 0 clocksource_mutex irq_context: 0 syscore_ops_lock irq_context: 0 binfmt_lock irq_context: 0 pin_fs_lock irq_context: 0 &type->s_umount_key#7/1 irq_context: 0 &type->s_umount_key#7/1 fs_reclaim irq_context: 0 &type->s_umount_key#7/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#7/1 pool_lock#2 irq_context: 0 &type->s_umount_key#7/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#7/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#7/1 shrinker_mutex irq_context: 0 &type->s_umount_key#7/1 &____s->seqcount irq_context: 0 &type->s_umount_key#7/1 &c->lock irq_context: 0 &type->s_umount_key#7/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#7/1 sb_lock irq_context: 0 &type->s_umount_key#7/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#7/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#7/1 &sb->s_type->i_lock_key#6 irq_context: 0 &type->s_umount_key#7/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#7/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#7/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#7/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#7/1 &sb->s_type->i_lock_key#6 &dentry->d_lock irq_context: 0 &type->s_umount_key#7/1 &dentry->d_lock irq_context: 0 rcu_read_lock mount_lock irq_context: 0 rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#2 irq_context: 0 &sb->s_type->i_mutex_key#2 &sb->s_type->i_lock_key#6 irq_context: 0 &sb->s_type->i_mutex_key#2 rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#2 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#2 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#2 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#2 rcu_read_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#2 &dentry->d_lock &wq irq_context: 0 &sb->s_type->i_mutex_key#2 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#2 &s->s_inode_list_lock irq_context: 0 
&sb->s_type->i_mutex_key#2 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#2 &sb->s_type->i_lock_key#6 &dentry->d_lock irq_context: 0 &type->s_umount_key#8/1 irq_context: 0 &type->s_umount_key#8/1 fs_reclaim irq_context: 0 &type->s_umount_key#8/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#8/1 pool_lock#2 irq_context: 0 &type->s_umount_key#8/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#8/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#8/1 shrinker_mutex irq_context: 0 &type->s_umount_key#8/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#8/1 sb_lock irq_context: 0 &type->s_umount_key#8/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#8/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#8/1 &sb->s_type->i_lock_key#7 irq_context: 0 &type->s_umount_key#8/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#8/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#8/1 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 &type->s_umount_key#8/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#3 irq_context: 0 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 chrdevs_lock irq_context: 0 cb_lock irq_context: 0 cb_lock genl_mutex irq_context: 0 cb_lock genl_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex pool_lock#2 irq_context: 0 subsys mutex#4 irq_context: 0 async_lock irq_context: 0 rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) irq_context: 0 regulator_list_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex device_links_srcu irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->power.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex fwnode_link_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex device_links_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->devres_lock irq_context: 0 
(wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pinctrl_list_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pinctrl_maps_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &k->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &x->wait#9 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex regulator_nesting_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex regulator_ww_class_mutex regulator_nesting_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex devtree_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex &k->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex bus_type_sem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &c->lock irq_context: 0 &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &root->kernfs_rwsem irq_context: 0 (reboot_notifier_list).rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dpm_list_mtx irq_context: 0 purge_vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex uevent_sock_mutex 
irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex running_helpers_waitq.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex subsys mutex#5 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex subsys mutex#5 &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pin_fs_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 purge_vmap_area_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex regulator_list_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex deferred_probe_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &x->wait#6 irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &rq->__lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex rcu_read_lock &rq->__lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex probe_waitqueue.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_done.lock irq_context: 0 &type->s_umount_key#9/1 irq_context: 0 &type->s_umount_key#9/1 fs_reclaim irq_context: 0 &type->s_umount_key#9/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#9/1 pool_lock#2 irq_context: 0 &type->s_umount_key#9/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#9/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#9/1 shrinker_mutex irq_context: 0 &type->s_umount_key#9/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#9/1 sb_lock irq_context: 0 &type->s_umount_key#9/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#9/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#9/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#9/1 &____s->seqcount irq_context: 0 &type->s_umount_key#9/1 &c->lock irq_context: 0 &type->s_umount_key#9/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#9/1 &sb->s_type->i_lock_key#8 irq_context: 0 &type->s_umount_key#9/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#9/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#9/1 &sb->s_type->i_lock_key#8 &dentry->d_lock irq_context: 0 &type->s_umount_key#9/1 &dentry->d_lock irq_context: 0 pernet_ops_rwsem fs_reclaim irq_context: 0 pernet_ops_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem &____s->seqcount irq_context: 0 pernet_ops_rwsem &c->lock irq_context: 0 pernet_ops_rwsem sysctl_lock irq_context: 0 vmap_purge_lock irq_context: 0 vmap_purge_lock purge_vmap_area_lock irq_context: 0 vmap_purge_lock free_vmap_area_lock irq_context: 0 vmap_purge_lock free_vmap_area_lock &obj_hash[i].lock irq_context: 0 vmap_purge_lock free_vmap_area_lock pool_lock#2 irq_context: 0 vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock irq_context: 0 vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &obj_hash[i].lock irq_context: 0 &fp->aux->used_maps_mutex irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex pcpu_lock irq_context: 0 proto_list_mutex irq_context: 0 targets_mutex irq_context: 0 nl_table_lock irq_context: 0 nl_table_wait.lock irq_context: 0 net_family_lock irq_context: 0 pernet_ops_rwsem net_generic_ids.xa_lock irq_context: 0 pernet_ops_rwsem mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem &sb->s_type->i_lock_key#8 irq_context: 0 pernet_ops_rwsem &dir->lock irq_context: 0 pernet_ops_rwsem &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK k-slock-AF_NETLINK irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rhashtable_bucket irq_context: 0 pernet_ops_rwsem k-slock-AF_NETLINK irq_context: 0 pernet_ops_rwsem nl_table_lock irq_context: 0 pernet_ops_rwsem nl_table_wait.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex irq_context: 0 rtnl_mutex fs_reclaim irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sparse_irq_lock irq_context: 0 
sparse_irq_lock fs_reclaim irq_context: 0 sparse_irq_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sparse_irq_lock pool_lock#2 irq_context: 0 sparse_irq_lock &c->lock irq_context: 0 sparse_irq_lock &____s->seqcount irq_context: 0 sparse_irq_lock lock irq_context: 0 sparse_irq_lock lock kernfs_idr_lock irq_context: 0 sparse_irq_lock &root->kernfs_rwsem irq_context: 0 sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sparse_irq_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 &cma->lock irq_context: 0 cma_mutex irq_context: 0 cma_mutex &zone->lock irq_context: 0 cma_mutex &zone->lock &____s->seqcount irq_context: 0 cma_mutex pcpu_drain_mutex &pcp->lock irq_context: 0 cma_mutex pcpu_drain_mutex &pcp->lock &zone->lock irq_context: 0 cma_mutex pcpu_drain_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cma_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 cma_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cma_mutex rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 cma_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 cma_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[1] &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[1] &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[1] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cma_mutex &obj_hash[i].lock irq_context: 0 cma_mutex lock#2 irq_context: 0 &pool->lock#2 irq_context: 0 cma_mutex rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[2] &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[2] &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[2] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 freezer_lock irq_context: 0 audit_backlog_wait.lock irq_context: 0 &list->lock irq_context: 0 kauditd_wait.lock irq_context: 0 kauditd_wait.lock &p->pi_lock irq_context: 0 lock#3 irq_context: 0 lock#3 &zone->lock irq_context: 0 pcp_batch_high_lock irq_context: 0 khugepaged_mutex irq_context: 0 &(&priv->bus_notifier)->rwsem irq_context: 0 gdp_mutex irq_context: 0 gdp_mutex &k->list_lock irq_context: 0 gdp_mutex fs_reclaim irq_context: 0 gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 gdp_mutex pool_lock#2 irq_context: 0 gdp_mutex lock irq_context: 0 gdp_mutex lock kernfs_idr_lock irq_context: 0 
gdp_mutex &root->kernfs_rwsem irq_context: 0 gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 subsys mutex#6 irq_context: 0 subsys mutex#6 &k->k_lock irq_context: 0 subsys mutex#7 irq_context: 0 subsys mutex#7 &k->list_lock irq_context: 0 subsys mutex#7 &k->k_lock irq_context: 0 regmap_debugfs_early_lock irq_context: 0 (acpi_reconfig_chain).rwsem irq_context: 0 __i2c_board_lock irq_context: 0 quarantine_lock irq_context: 0 core_lock irq_context: 0 core_lock &k->list_lock irq_context: 0 core_lock &k->k_lock irq_context: 0 cb_lock genl_mutex nl_table_lock irq_context: 0 cb_lock genl_mutex nl_table_wait.lock irq_context: 0 remove_cache_srcu irq_context: 0 remove_cache_srcu quarantine_lock irq_context: 0 nl_table_lock irq_context: 0 thermal_governor_lock irq_context: 0 thermal_governor_lock thermal_list_lock irq_context: 0 cpuidle_lock irq_context: 0 cpuidle_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 cpuidle_lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 cpuidle_lock rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 cpuidle_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cpuidle_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cpuidle_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cpuidle_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpuidle_lock rcu_state.exp_mutex &rq->__lock irq_context: 0 cpuidle_lock &obj_hash[i].lock irq_context: 0 cpuidle_lock (console_sem).lock irq_context: 0 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_lock_key#8 irq_context: 0 &dir->lock irq_context: 0 k-sk_lock-AF_QIPCRTR irq_context: 0 k-sk_lock-AF_QIPCRTR k-slock-AF_QIPCRTR irq_context: 0 k-slock-AF_QIPCRTR irq_context: 0 k-sk_lock-AF_QIPCRTR fs_reclaim irq_context: 0 k-sk_lock-AF_QIPCRTR fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 k-sk_lock-AF_QIPCRTR qrtr_ports.xa_lock irq_context: 0 k-sk_lock-AF_QIPCRTR pool_lock#2 irq_context: 0 k-sk_lock-AF_QIPCRTR qrtr_node_lock irq_context: 0 k-sk_lock-AF_QIPCRTR &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex irq_context: 0 resource_lock pool_lock#2 irq_context: 0 resource_lock &obj_hash[i].lock irq_context: 0 resource_lock &c->lock irq_context: 0 resource_lock &____s->seqcount irq_context: 0 crngs.lock irq_context: 0 (crypto_chain).rwsem irq_context: 0 tty_mutex irq_context: 0 iova_cache_mutex irq_context: 0 iova_cache_mutex cpu_hotplug_lock irq_context: 0 iova_cache_mutex cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 iova_cache_mutex slab_mutex irq_context: 0 iova_cache_mutex slab_mutex fs_reclaim irq_context: 0 iova_cache_mutex slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 iova_cache_mutex slab_mutex pool_lock#2 irq_context: 0 iova_cache_mutex slab_mutex pcpu_alloc_mutex irq_context: 0 iova_cache_mutex slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 gdp_mutex &c->lock irq_context: 0 gdp_mutex &____s->seqcount irq_context: 0 subsys mutex#8 irq_context: 0 subsys mutex#8 &k->k_lock irq_context: 0 device_links_lock irq_context: 0 uidhash_lock irq_context: 0 rcu_state.barrier_mutex irq_context: 0 &type->s_umount_key#24/1 &wq->mutex &x->wait#10 irq_context: 0 &type->s_umount_key#24/1 wq_mayday_lock irq_context: 0 oom_reaper_wait.lock irq_context: 0 subsys mutex#9 irq_context: 0 &pgdat->kcompactd_wait irq_context: 0 hugetlb_lock irq_context: 0 
&type->s_umount_key#24/1 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 memory_tier_lock irq_context: 0 memory_tier_lock fs_reclaim irq_context: 0 memory_tier_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 memory_tier_lock pool_lock#2 irq_context: 0 memory_tier_lock &x->wait#9 irq_context: 0 memory_tier_lock &obj_hash[i].lock irq_context: 0 memory_tier_lock &k->list_lock irq_context: 0 memory_tier_lock &c->lock irq_context: 0 memory_tier_lock &____s->seqcount irq_context: 0 memory_tier_lock lock irq_context: 0 memory_tier_lock lock kernfs_idr_lock irq_context: 0 memory_tier_lock &root->kernfs_rwsem irq_context: 0 memory_tier_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 memory_tier_lock bus_type_sem irq_context: 0 memory_tier_lock sysfs_symlink_target_lock irq_context: 0 memory_tier_lock &k->k_lock irq_context: 0 memory_tier_lock &root->kernfs_rwsem irq_context: 0 memory_tier_lock &dev->power.lock irq_context: 0 memory_tier_lock dpm_list_mtx irq_context: 0 memory_tier_lock uevent_sock_mutex irq_context: 0 memory_tier_lock running_helpers_waitq.lock irq_context: 0 memory_tier_lock &dev->mutex &k->list_lock irq_context: 0 memory_tier_lock &dev->mutex &k->k_lock irq_context: 0 memory_tier_lock &dev->mutex &dev->power.lock irq_context: 0 memory_tier_lock subsys mutex#10 irq_context: 0 memory_tier_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 memory_tier_lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 memory_tier_lock rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 memory_tier_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 memory_tier_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 memory_tier_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 memory_tier_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 memory_tier_lock rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 memory_tier_lock rcu_state.exp_mutex &rq->__lock irq_context: 0 memory_tier_lock rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rq->__lock irq_context: 0 khugepaged_mutex fs_reclaim irq_context: 0 khugepaged_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 khugepaged_mutex pool_lock#2 irq_context: 0 khugepaged_mutex kthread_create_lock irq_context: 0 khugepaged_mutex &p->pi_lock irq_context: 0 khugepaged_mutex &p->pi_lock &rq->__lock irq_context: 0 khugepaged_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 khugepaged_mutex &x->wait irq_context: 0 khugepaged_mutex &rq->__lock irq_context: 0 khugepaged_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ksm_thread_mutex irq_context: 0 ksm_thread_wait.lock irq_context: 0 khugepaged_mutex &obj_hash[i].lock irq_context: 0 lock#2 irq_context: 0 khugepaged_mm_lock irq_context: 0 khugepaged_wait.lock irq_context: 0 khugepaged_mutex lock#3 irq_context: 0 khugepaged_mutex lock#3 &zone->lock irq_context: 0 khugepaged_mutex pcp_batch_high_lock irq_context: 0 cgroup_mutex fs_reclaim irq_context: 0 cgroup_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 damon_ops_lock irq_context: 0 crypto_alg_sem irq_context: 0 
crypto_alg_sem (crypto_chain).rwsem irq_context: 0 cpu_hotplug_lock fs_reclaim irq_context: 0 cpu_hotplug_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_hotplug_lock pool_lock#2 irq_context: 0 cpu_hotplug_lock pcpu_alloc_mutex irq_context: 0 cpu_hotplug_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 cpu_hotplug_lock &wq->mutex irq_context: 0 cpu_hotplug_lock &wq->mutex &pool->lock irq_context: 0 cpu_hotplug_lock kthread_create_lock irq_context: 0 cpu_hotplug_lock &p->pi_lock irq_context: 0 cpu_hotplug_lock &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock &x->wait irq_context: 0 cpu_hotplug_lock &rq->__lock irq_context: 0 cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pool->lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pool->lock (worker)->lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pool->lock (worker)->lock &p->pi_lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pool->lock (worker)->lock &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pool->lock (worker)->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock &____s->seqcount irq_context: 0 bio_slab_lock irq_context: 0 bio_slab_lock fs_reclaim irq_context: 0 bio_slab_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 bio_slab_lock pool_lock#2 irq_context: 0 bio_slab_lock slab_mutex irq_context: 0 bio_slab_lock slab_mutex fs_reclaim irq_context: 0 bio_slab_lock slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 bio_slab_lock slab_mutex pool_lock#2 irq_context: 0 bio_slab_lock slab_mutex &c->lock irq_context: 0 bio_slab_lock slab_mutex &____s->seqcount irq_context: 0 bio_slab_lock slab_mutex pcpu_alloc_mutex irq_context: 0 bio_slab_lock slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 bio_slab_lock bio_slabs.xa_lock irq_context: 0 bio_slab_lock bio_slabs.xa_lock pool_lock#2 irq_context: 0 bio_slab_lock bio_slabs.xa_lock &c->lock irq_context: 0 bio_slab_lock bio_slabs.xa_lock &____s->seqcount irq_context: 0 major_names_lock irq_context: 0 major_names_lock fs_reclaim irq_context: 0 major_names_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 major_names_lock pool_lock#2 irq_context: 0 major_names_lock major_names_spinlock irq_context: 0 &p->alloc_lock &x->wait &p->pi_lock irq_context: softirq &(&kfence_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 rcu_tasks.tasks_gp_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)rcu_gp (work_completion)(&rew->rew_work) &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &base->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&rtpcp->lazy_timer) irq_context: softirq (&rtpcp->lazy_timer) rcu_tasks_trace__percpu.cbs_pcpu_lock irq_context: softirq (&rtpcp->lazy_timer) rcu_tasks__percpu.cbs_pcpu_lock irq_context: softirq (&rtpcp->lazy_timer) rcu_tasks__percpu.cbs_pcpu_lock &obj_hash[i].lock irq_context: softirq (&rtpcp->lazy_timer) rcu_tasks__percpu.cbs_pcpu_lock &base->lock irq_context: softirq (&rtpcp->lazy_timer) rcu_tasks__percpu.cbs_pcpu_lock &base->lock &obj_hash[i].lock irq_context: 0 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) (&timer.timer) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[2] &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_tasks.tasks_gp_mutex (console_sem).lock irq_context: softirq (&pool->mayday_timer) &pool->lock irq_context: softirq (&pool->mayday_timer) &pool->lock wq_mayday_lock irq_context: softirq (&pool->mayday_timer) &obj_hash[i].lock irq_context: softirq (&pool->mayday_timer) &base->lock irq_context: softirq (&pool->mayday_timer) &base->lock &obj_hash[i].lock irq_context: 0 &pgdat->kswapd_lock irq_context: softirq drivers/char/random.c:251 irq_context: softirq drivers/char/random.c:251 rcu_read_lock &pool->lock irq_context: softirq drivers/char/random.c:251 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq drivers/char/random.c:251 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq drivers/char/random.c:251 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq drivers/char/random.c:251 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (next_reseed).work irq_context: 0 (wq_completion)events_unbound (next_reseed).work &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (next_reseed).work &base->lock irq_context: 0 (wq_completion)events_unbound (next_reseed).work &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (next_reseed).work input_pool.lock irq_context: 0 (wq_completion)events_unbound (next_reseed).work base_crng.lock irq_context: softirq mm/vmstat.c:2022 irq_context: softirq mm/vmstat.c:2022 rcu_read_lock &pool->lock irq_context: softirq mm/vmstat.c:2022 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq mm/vmstat.c:2022 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq mm/vmstat.c:2022 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq mm/vmstat.c:2022 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (shepherd).work irq_context: 0 (wq_completion)events (shepherd).work cpu_hotplug_lock irq_context: 0 
(wq_completion)events (shepherd).work cpu_hotplug_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (shepherd).work cpu_hotplug_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (shepherd).work cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (shepherd).work cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (shepherd).work cpu_hotplug_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (shepherd).work &obj_hash[i].lock irq_context: 0 (wq_completion)events (shepherd).work &base->lock irq_context: 0 (wq_completion)events (shepherd).work &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mm_percpu_wq irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &obj_hash[i].lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &base->lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &base->lock &obj_hash[i].lock irq_context: 0 &pool->lock &base->lock irq_context: 0 &pool->lock &base->lock &obj_hash[i].lock irq_context: 0 slab_mutex batched_entropy_u8.lock irq_context: 0 slab_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 slab_mutex batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 slab_mutex kfence_freelist_lock irq_context: 0 console_lock fs_reclaim irq_context: 0 console_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 console_lock &x->wait#9 irq_context: 0 console_lock &k->list_lock irq_context: 0 console_lock gdp_mutex irq_context: 0 console_lock gdp_mutex &k->list_lock irq_context: 0 console_lock gdp_mutex fs_reclaim irq_context: 0 console_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 console_lock gdp_mutex pool_lock#2 irq_context: 0 console_lock gdp_mutex lock irq_context: 0 console_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 console_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 console_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 console_lock lock irq_context: 0 console_lock lock kernfs_idr_lock 
irq_context: 0 console_lock &root->kernfs_rwsem irq_context: 0 console_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 console_lock bus_type_sem irq_context: 0 console_lock sysfs_symlink_target_lock irq_context: 0 console_lock &root->kernfs_rwsem irq_context: 0 console_lock &dev->power.lock irq_context: 0 console_lock dpm_list_mtx irq_context: 0 console_lock uevent_sock_mutex irq_context: 0 console_lock running_helpers_waitq.lock irq_context: 0 console_lock subsys mutex#11 irq_context: 0 console_lock subsys mutex#11 &k->k_lock irq_context: 0 shrink_qlist.lock irq_context: 0 remove_cache_srcu_srcu_usage.lock irq_context: 0 remove_cache_srcu_srcu_usage.lock &obj_hash[i].lock irq_context: 0 &ACCESS_PRIVATE(sdp, lock) irq_context: 0 remove_cache_srcu irq_context: 0 remove_cache_srcu_srcu_usage.lock rcu_read_lock &pool->lock irq_context: 0 remove_cache_srcu_srcu_usage.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 remove_cache_srcu_srcu_usage.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 remove_cache_srcu_srcu_usage.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 remove_cache_srcu_srcu_usage.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex remove_cache_srcu_srcu_usage.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) remove_cache_srcu_srcu_usage.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &base->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &base->lock &obj_hash[i].lock irq_context: 0 &x->wait#2 irq_context: softirq &(&ssp->srcu_sup->work)->timer irq_context: softirq &(&ssp->srcu_sup->work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&ssp->srcu_sup->work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&ssp->srcu_sup->work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&ssp->srcu_sup->work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&ssp->srcu_sup->work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex remove_cache_srcu_srcu_usage.lock irq_context: 0 cpu_hotplug_lock flush_lock irq_context: 0 cpu_hotplug_lock flush_lock &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock flush_lock rcu_read_lock &pool->lock irq_context: 0 cpu_hotplug_lock flush_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock flush_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cpu_hotplug_lock flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock 
flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock flush_lock (work_completion)(&sfw->work) irq_context: 0 cpu_hotplug_lock flush_lock rcu_read_lock (wq_completion)slub_flushwq irq_context: 0 cpu_hotplug_lock flush_lock &x->wait#10 irq_context: 0 cpu_hotplug_lock flush_lock &rq->__lock irq_context: 0 cpu_hotplug_lock flush_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)slub_flushwq irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&sfw->work) irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&sfw->work) &c->lock irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&sfw->work) &n->list_lock irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&sfw->work) &obj_hash[i].lock irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&barr->work) irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&barr->work) &x->wait#10 irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)slub_flushwq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq allocation_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &meta->lock irq_context: 0 &n->list_lock &c->lock irq_context: 0 clk_debug_lock irq_context: 0 clocks_mutex irq_context: 0 acpi_scan_lock irq_context: 0 acpi_scan_lock semaphore->lock irq_context: 0 acpi_scan_lock fs_reclaim irq_context: 0 acpi_scan_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock pool_lock#2 irq_context: 0 acpi_scan_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &x->wait#9 irq_context: 0 acpi_scan_lock &obj_hash[i].lock pool_lock irq_context: 0 acpi_scan_lock acpi_device_lock irq_context: 0 acpi_scan_lock acpi_device_lock fs_reclaim irq_context: 0 acpi_scan_lock acpi_device_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_device_lock pool_lock#2 irq_context: 0 acpi_scan_lock acpi_device_lock &xa->xa_lock#2 irq_context: 0 acpi_scan_lock acpi_device_lock semaphore->lock irq_context: 0 acpi_scan_lock acpi_device_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &k->list_lock irq_context: 0 acpi_scan_lock lock irq_context: 0 acpi_scan_lock lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock bus_type_sem irq_context: 0 acpi_scan_lock sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &k->k_lock irq_context: 0 acpi_scan_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 acpi_scan_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &c->lock irq_context: 0 acpi_scan_lock &____s->seqcount irq_context: 0 acpi_scan_lock &dev->power.lock irq_context: 0 acpi_scan_lock dpm_list_mtx irq_context: 0 acpi_scan_lock &dev->mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex &dev->power.lock irq_context: 0 acpi_scan_lock subsys mutex#12 irq_context: 0 acpi_scan_lock uevent_sock_mutex irq_context: 0 acpi_scan_lock running_helpers_waitq.lock irq_context: 0 acpi_scan_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 acpi_scan_lock &pcp->lock &zone->lock irq_context: 0 acpi_scan_lock &pcp->lock &zone->lock 
&____s->seqcount irq_context: 0 acpi_scan_lock acpi_device_lock &c->lock irq_context: 0 acpi_scan_lock acpi_device_lock &____s->seqcount irq_context: 0 acpi_scan_lock &n->list_lock irq_context: 0 acpi_scan_lock &n->list_lock &c->lock irq_context: 0 acpi_scan_lock acpi_ioremap_lock irq_context: 0 acpi_scan_lock acpi_ioremap_lock fs_reclaim irq_context: 0 acpi_scan_lock acpi_ioremap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_ioremap_lock pool_lock#2 irq_context: 0 acpi_scan_lock acpi_ioremap_lock &____s->seqcount irq_context: 0 acpi_scan_lock acpi_ioremap_lock free_vmap_area_lock irq_context: 0 acpi_scan_lock acpi_ioremap_lock free_vmap_area_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock acpi_ioremap_lock free_vmap_area_lock pool_lock#2 irq_context: 0 acpi_scan_lock acpi_ioremap_lock vmap_area_lock irq_context: 0 acpi_scan_lock quarantine_lock irq_context: 0 acpi_scan_lock &device->physical_node_lock irq_context: 0 acpi_scan_lock &device->physical_node_lock sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &device->physical_node_lock fs_reclaim irq_context: 0 acpi_scan_lock &device->physical_node_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &device->physical_node_lock pool_lock#2 irq_context: 0 acpi_scan_lock &device->physical_node_lock lock irq_context: 0 acpi_scan_lock &device->physical_node_lock lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &device->physical_node_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &device->physical_node_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &device->physical_node_lock &c->lock irq_context: 0 acpi_scan_lock &device->physical_node_lock &____s->seqcount irq_context: 0 acpi_scan_lock irq_domain_mutex irq_context: 0 acpi_scan_lock &domain->mutex irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock fs_reclaim irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock pool_lock#2 irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock pcpu_alloc_mutex irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &____s->seqcount irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &c->lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &domain->mutex &irq_desc_lock_class irq_context: 0 acpi_scan_lock &domain->mutex fs_reclaim irq_context: 0 acpi_scan_lock &domain->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &domain->mutex pool_lock#2 irq_context: 0 acpi_scan_lock resource_lock irq_context: 0 acpi_scan_lock &(&priv->bus_notifier)->rwsem irq_context: 0 acpi_scan_lock &(&priv->bus_notifier)->rwsem &device->physical_node_lock irq_context: 0 acpi_scan_lock fwnode_link_lock irq_context: 0 acpi_scan_lock fwnode_link_lock &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex &device->physical_node_lock irq_context: 0 
acpi_scan_lock &dev->mutex device_links_srcu irq_context: 0 acpi_scan_lock &dev->mutex fwnode_link_lock irq_context: 0 acpi_scan_lock &dev->mutex device_links_lock irq_context: 0 acpi_scan_lock &dev->mutex fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex &dev->devres_lock irq_context: 0 acpi_scan_lock &dev->mutex pinctrl_list_mutex irq_context: 0 acpi_scan_lock &dev->mutex pinctrl_maps_mutex irq_context: 0 acpi_scan_lock &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 acpi_scan_lock &dev->mutex sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &dev->mutex lock irq_context: 0 acpi_scan_lock &dev->mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock semaphore->lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock wakeup_ida.xa_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &x->wait#9 irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex lock 
kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock bus_type_sem irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &c->lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock uevent_sock_mutex irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock running_helpers_waitq.lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock subsys mutex#13 irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock subsys mutex#13 &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock events_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_wakeup_lock irq_context: 0 acpi_scan_lock &dev->mutex resource_lock irq_context: 0 acpi_scan_lock &dev->mutex free_vmap_area_lock irq_context: 0 acpi_scan_lock &dev->mutex vmap_area_lock irq_context: 0 acpi_scan_lock &dev->mutex init_mm.page_table_lock irq_context: 0 acpi_scan_lock &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 acpi_scan_lock &dev->mutex chrdevs_lock irq_context: 0 acpi_scan_lock &dev->mutex tty_mutex irq_context: 0 acpi_scan_lock &dev->mutex proc_subdir_lock irq_context: 0 acpi_scan_lock &dev->mutex proc_inum_ida.xa_lock irq_context: 0 acpi_scan_lock &dev->mutex proc_subdir_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex port_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex port_mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &x->wait#9 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex bus_type_sem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &k->k_lock irq_context: 0 
acpi_scan_lock &dev->mutex port_mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &c->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->power.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex dpm_list_mtx irq_context: 0 acpi_scan_lock &dev->mutex port_mutex uevent_sock_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex running_helpers_waitq.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex device_links_srcu irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex fwnode_link_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex device_links_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->devres_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex pinctrl_list_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex pinctrl_maps_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex deferred_probe_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex uevent_sock_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex running_helpers_waitq.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex probe_waitqueue.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex subsys mutex#14 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &xa->xa_lock#3 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &rq->__lock irq_context: 0 (wq_completion)pm irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dev->power.lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dev->power.lock &dev->power.wait_queue irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex (console_sem).lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &port_lock_key irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex &port_lock_key irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex syslog_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex (console_sem).lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex console_lock console_srcu console_owner_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex console_lock console_srcu console_owner irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex console_lock console_srcu console_owner console_owner_lock irq_context: softirq &(&group->avgs_work)->timer irq_context: softirq &(&group->avgs_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&group->avgs_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&group->avgs_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&group->avgs_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&group->avgs_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer rcu_read_lock &pool->lock irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long 
__ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq mm/memcontrol.c:679 irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock &base->lock &obj_hash[i].lock irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work cgroup_rstat_lock irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work cgroup_rstat_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work &base->lock irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work &base->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#24/1 wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 &type->s_umount_key#24/1 rcu_read_lock &pool->lock irq_context: 0 &type->s_umount_key#24/1 rcu_read_lock &pool->lock (worker)->lock irq_context: 0 &type->s_umount_key#24/1 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock irq_context: 0 &type->s_umount_key#24/1 &sbi->old_work_lock irq_context: 0 &type->s_umount_key#24/1 (work_completion)(&(&sbi->old_work)->work) irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_mutex &rq->__lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex ctrl_ida.xa_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &x->wait#9 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->power.lock irq_context: 0 acpi_scan_lock 
&dev->mutex port_mutex &port->mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &c->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex semaphore->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex dpm_list_mtx irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex uevent_sock_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &obj_hash[i].lock pool_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex running_helpers_waitq.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->mutex &dev->power.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->mutex &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->mutex &dev->power.lock hrtimer_bases.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &dev->mutex &dev->power.lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex subsys mutex#15 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex *(&acpi_gbl_reference_count_lock) irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &n->list_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &n->list_lock &c->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &pcp->lock &zone->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex dev_pm_qos_sysfs_mtx irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex 
dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &k->k_lock klist_remove_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex deferred_probe_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &root->kernfs_rwsem pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex device_links_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex &k->list_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex fs_reclaim irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex bus_type_sem irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex req_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &p->pi_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &x->wait#11 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &rq->__lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers irq_context: 0 sb_writers mount_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rename_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 fs_reclaim irq_context: 0 sb_writers &type->i_mutex_dir_key/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers 
&type->i_mutex_dir_key/1 pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sbinfo->stat_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_lock_key#5 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &s->s_inode_list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tk_core.seq.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 batched_entropy_u32.lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &____s->seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &xattrs->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &simple_offset_xa_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &simple_offset_xa_lock pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &xattrs->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 smack_known_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 smack_known_lock &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 smack_known_lock pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_lock_key#5 &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 tk_core.seq.seqcount irq_context: 0 &x->wait#11 irq_context: 0 &x->wait#11 &p->pi_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex subsys mutex#16 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex subsys mutex#16 &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex chrdevs_lock irq_context: 0 acpi_scan_lock &dev->mutex fwnode_link_lock &k->k_lock irq_context: 0 acpi_scan_lock &dev->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 acpi_scan_lock &dev->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 acpi_scan_lock &dev->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex deferred_probe_mutex irq_context: 0 acpi_scan_lock &dev->mutex uevent_sock_mutex irq_context: 0 acpi_scan_lock &dev->mutex running_helpers_waitq.lock irq_context: 0 acpi_scan_lock &dev->mutex probe_waitqueue.lock irq_context: 0 acpi_scan_lock subsys mutex#4 irq_context: 0 acpi_scan_lock &dev->mutex &c->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &c->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &dev->mutex &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_lock console_srcu console_owner_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_lock console_srcu console_owner irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex batched_entropy_u8.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex 
batched_entropy_u8.lock crngs.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex kfence_freelist_lock irq_context: 0 acpi_scan_lock &dev->mutex acpi_pm_notifier_install_lock &c->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &c->lock irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &port->mutex &device->physical_node_lock &____s->seqcount irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &pcp->lock &zone->lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 acpi_scan_lock &dev->mutex port_mutex &obj_hash[i].lock pool_lock irq_context: 0 acpi_scan_lock (console_sem).lock irq_context: 0 acpi_scan_lock console_lock console_srcu console_owner_lock irq_context: 0 acpi_scan_lock console_lock console_srcu console_owner irq_context: 0 acpi_scan_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 acpi_scan_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock &obj_hash[i].lock pool_lock irq_context: 0 acpi_scan_lock &domain->mutex sparse_irq_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 acpi_scan_lock free_vmap_area_lock irq_context: 0 acpi_scan_lock vmap_area_lock irq_context: 0 acpi_scan_lock init_mm.page_table_lock irq_context: 0 acpi_scan_lock io_range_mutex irq_context: 0 acpi_scan_lock pci_bus_sem irq_context: 0 acpi_scan_lock gdp_mutex irq_context: 0 acpi_scan_lock gdp_mutex &k->list_lock irq_context: 0 acpi_scan_lock gdp_mutex fs_reclaim irq_context: 0 acpi_scan_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock gdp_mutex pool_lock#2 irq_context: 0 acpi_scan_lock gdp_mutex lock irq_context: 0 acpi_scan_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock subsys mutex#17 irq_context: 0 acpi_scan_lock subsys mutex#17 &k->k_lock irq_context: 0 acpi_scan_lock acpi_hp_context_lock irq_context: 0 acpi_scan_lock acpi_hp_context_lock fs_reclaim irq_context: 0 acpi_scan_lock acpi_hp_context_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_hp_context_lock pool_lock#2 irq_context: 0 acpi_scan_lock bridge_mutex irq_context: 0 acpi_scan_lock pci_bus_sem irq_context: 0 acpi_scan_lock pci_lock irq_context: 0 acpi_scan_lock pci_acpi_companion_lookup_sem irq_context: 0 acpi_scan_lock pci_slot_mutex irq_context: 0 acpi_scan_lock resource_alignment_lock irq_context: 0 acpi_scan_lock device_links_srcu irq_context: 0 acpi_scan_lock &dev->power.lock &dev->power.lock/1 irq_context: 0 acpi_scan_lock iort_msi_chip_lock irq_context: 0 acpi_scan_lock subsys mutex#18 irq_context: 0 acpi_scan_lock devtree_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock semaphore->lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock fs_reclaim irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock batched_entropy_u8.lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock kfence_freelist_lock irq_context: 0 acpi_scan_lock 
acpi_pm_notifier_install_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock pool_lock#2 irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock fs_reclaim irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock pool_lock#2 irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock wakeup_ida.xa_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &x->wait#9 irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &k->list_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &k->list_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex fs_reclaim irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex pool_lock#2 irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock kernfs_idr_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &root->kernfs_rwsem irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock bus_type_sem irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock sysfs_symlink_target_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &c->lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &____s->seqcount irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock uevent_sock_mutex irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &obj_hash[i].lock pool_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock running_helpers_waitq.lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock &k->k_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock subsys mutex#13 irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock subsys mutex#13 &k->k_lock irq_context: 0 acpi_scan_lock acpi_pm_notifier_install_lock acpi_pm_notifier_lock events_lock irq_context: 0 acpi_scan_lock pci_rescan_remove_lock irq_context: 0 acpi_scan_lock 
pci_rescan_remove_lock &dev->mutex &dev->power.lock irq_context: 0 acpi_scan_lock pci_rescan_remove_lock &dev->mutex &k->list_lock irq_context: 0 acpi_scan_lock pci_rescan_remove_lock &dev->mutex &k->k_lock irq_context: 0 acpi_scan_lock acpi_link_lock irq_context: 0 acpi_scan_lock acpi_link_lock fs_reclaim irq_context: 0 acpi_scan_lock acpi_link_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 acpi_scan_lock acpi_link_lock pool_lock#2 irq_context: 0 acpi_scan_lock acpi_link_lock semaphore->lock irq_context: 0 acpi_scan_lock acpi_link_lock &obj_hash[i].lock irq_context: 0 acpi_scan_lock acpi_link_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 acpi_scan_lock acpi_link_lock (console_sem).lock irq_context: 0 acpi_scan_lock acpi_link_lock console_lock console_srcu console_owner_lock irq_context: 0 acpi_scan_lock acpi_link_lock console_lock console_srcu console_owner irq_context: 0 acpi_scan_lock acpi_link_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 acpi_scan_lock acpi_link_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 acpi_scan_lock acpi_link_lock &c->lock irq_context: 0 acpi_scan_lock acpi_link_lock &____s->seqcount irq_context: 0 acpi_scan_lock acpi_dep_list_lock irq_context: 0 acpi_scan_lock power_resource_list_lock irq_context: 0 acpi_device_lock irq_context: 0 &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem pool_lock#2 irq_context: 0 klist_remove_lock irq_context: 0 &k->k_lock klist_remove_lock irq_context: 0 kernfs_idr_lock irq_context: 0 console_lock console_srcu console_owner_lock irq_context: 0 console_lock console_srcu console_owner irq_context: 0 console_lock console_srcu console_owner &port_lock_key irq_context: 0 console_lock console_srcu console_owner console_owner_lock irq_context: 0 k-sk_lock-AF_NETLINK irq_context: 0 k-sk_lock-AF_NETLINK k-slock-AF_NETLINK irq_context: 0 k-sk_lock-AF_NETLINK rcu_read_lock rhashtable_bucket irq_context: 0 k-slock-AF_NETLINK irq_context: 0 cpu_hotplug_lock cpuhp_state-up fs_reclaim irq_context: 0 cpu_hotplug_lock cpuhp_state-up fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_hotplug_lock cpuhp_state-up lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up lock kernfs_idr_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up &root->kernfs_rwsem irq_context: 0 cpu_hotplug_lock cpuhp_state-up &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &type->s_umount_key#10/1 irq_context: 0 &type->s_umount_key#10/1 fs_reclaim irq_context: 0 &type->s_umount_key#10/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#10/1 &c->lock irq_context: 0 &type->s_umount_key#10/1 &____s->seqcount irq_context: 0 &type->s_umount_key#10/1 pool_lock#2 irq_context: 0 &type->s_umount_key#10/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#10/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#10/1 shrinker_mutex irq_context: 0 &type->s_umount_key#10/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#10/1 sb_lock irq_context: 0 &type->s_umount_key#10/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#10/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#10/1 &sb->s_type->i_lock_key#9 irq_context: 0 &type->s_umount_key#10/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#10/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#10/1 &sb->s_type->i_lock_key#9 &dentry->d_lock irq_context: 0 
&type->s_umount_key#10/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#11/1 irq_context: 0 &type->s_umount_key#11/1 fs_reclaim irq_context: 0 &type->s_umount_key#11/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#11/1 pool_lock#2 irq_context: 0 &type->s_umount_key#11/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#11/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#11/1 shrinker_mutex irq_context: 0 &type->s_umount_key#11/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#11/1 sb_lock irq_context: 0 &type->s_umount_key#11/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#11/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#11/1 &sb->s_type->i_lock_key#10 irq_context: 0 &type->s_umount_key#11/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#11/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#11/1 &c->lock irq_context: 0 &type->s_umount_key#11/1 &____s->seqcount irq_context: 0 &type->s_umount_key#11/1 &sb->s_type->i_lock_key#10 &dentry->d_lock irq_context: 0 &type->s_umount_key#11/1 &dentry->d_lock irq_context: 0 &mm->mmap_lock irq_context: 0 &mm->mmap_lock reservation_ww_class_acquire irq_context: 0 &mm->mmap_lock reservation_ww_class_acquire reservation_ww_class_mutex irq_context: 0 &mm->mmap_lock reservation_ww_class_acquire reservation_ww_class_mutex fs_reclaim irq_context: 0 &mm->mmap_lock reservation_ww_class_acquire reservation_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock reservation_ww_class_acquire reservation_ww_class_mutex fs_reclaim &mapping->i_mmap_rwsem irq_context: 0 &mm->mmap_lock reservation_ww_class_acquire reservation_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start dma_fence_map irq_context: 0 &mm->mmap_lock irq_context: 0 key irq_context: 0 attribute_container_mutex irq_context: 0 triggers_list_lock irq_context: 0 leds_list_lock irq_context: 0 bus_type_sem irq_context: 0 (usb_notifier_list).rwsem irq_context: 0 &device->physical_node_lock irq_context: 0 rc_map_lock irq_context: 0 &root->kernfs_rwsem &____s->seqcount irq_context: 0 pci_lock irq_context: 0 subsys mutex#19 irq_context: 0 &(&priv->bus_notifier)->rwsem irq_context: 0 &(&priv->bus_notifier)->rwsem iommu_probe_device_lock irq_context: 0 &(&priv->bus_notifier)->rwsem iommu_probe_device_lock iommu_device_lock irq_context: 0 (efi_runtime_lock).lock irq_context: 0 &x->wait#12 irq_context: 0 (wq_completion)efi_rts_wq irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) cpu_asid_lock irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) efi_rt_lock irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) &x->wait#12 irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) &x->wait#12 &p->pi_lock irq_context: 0 (efivars_lock).lock irq_context: 0 devfreq_list_lock irq_context: 0 &entry->access irq_context: 0 info_mutex irq_context: 0 info_mutex proc_subdir_lock irq_context: 0 info_mutex fs_reclaim irq_context: 0 info_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 info_mutex &c->lock irq_context: 0 info_mutex &____s->seqcount irq_context: 0 info_mutex pool_lock#2 irq_context: 0 info_mutex proc_inum_ida.xa_lock irq_context: 0 info_mutex proc_subdir_lock irq_context: 0 kobj_ns_type_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim 
irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &x->wait#9 irq_context: 0 pernet_ops_rwsem rtnl_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &k->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex lock irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex bus_type_sem irq_context: 0 pernet_ops_rwsem rtnl_mutex sysfs_symlink_target_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex &dev->power.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dpm_list_mtx irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex running_helpers_waitq.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex subsys mutex#20 irq_context: 0 pernet_ops_rwsem rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &dir->lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_base_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex input_pool.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex nl_table_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex nl_table_wait.lock irq_context: 0 rcu_read_lock pool_lock#2 irq_context: 0 rcu_read_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock nl_table_lock irq_context: 0 rcu_read_lock 
nl_table_wait.lock irq_context: 0 qdisc_mod_lock irq_context: 0 bt_proto_lock irq_context: 0 hci_cb_list_lock irq_context: 0 mgmt_chan_list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 rate_ctrl_mutex irq_context: 0 rate_ctrl_mutex fs_reclaim irq_context: 0 rate_ctrl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rate_ctrl_mutex pool_lock#2 irq_context: 0 netlbl_domhsh_lock irq_context: 0 netlbl_unlhsh_lock irq_context: 0 rcu_read_lock netlbl_domhsh_lock irq_context: 0 rcu_read_lock netlbl_domhsh_lock pool_lock#2 irq_context: 0 misc_mtx irq_context: 0 misc_mtx fs_reclaim irq_context: 0 misc_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx pool_lock#2 irq_context: 0 misc_mtx &x->wait#9 irq_context: 0 misc_mtx &obj_hash[i].lock irq_context: 0 misc_mtx &____s->seqcount irq_context: 0 misc_mtx &k->list_lock irq_context: 0 misc_mtx gdp_mutex irq_context: 0 misc_mtx gdp_mutex &k->list_lock irq_context: 0 misc_mtx gdp_mutex fs_reclaim irq_context: 0 misc_mtx gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx gdp_mutex pool_lock#2 irq_context: 0 misc_mtx gdp_mutex lock irq_context: 0 misc_mtx gdp_mutex lock kernfs_idr_lock irq_context: 0 misc_mtx gdp_mutex &root->kernfs_rwsem irq_context: 0 misc_mtx gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 misc_mtx lock irq_context: 0 misc_mtx lock kernfs_idr_lock irq_context: 0 misc_mtx &root->kernfs_rwsem irq_context: 0 misc_mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 misc_mtx bus_type_sem irq_context: 0 misc_mtx sysfs_symlink_target_lock irq_context: 0 misc_mtx &c->lock irq_context: 0 misc_mtx &root->kernfs_rwsem irq_context: 0 misc_mtx &dev->power.lock irq_context: 0 misc_mtx dpm_list_mtx irq_context: 0 misc_mtx req_lock irq_context: 0 misc_mtx &p->pi_lock irq_context: 0 misc_mtx &x->wait#11 irq_context: 0 misc_mtx &rq->__lock irq_context: 0 misc_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx uevent_sock_mutex irq_context: 0 misc_mtx running_helpers_waitq.lock irq_context: 0 misc_mtx subsys mutex#21 irq_context: 0 misc_mtx subsys mutex#21 &k->k_lock irq_context: 0 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex irq_context: 0 input_mutex irq_context: 0 input_mutex input_devices_poll_wait.lock irq_context: 0 (netlink_chain).rwsem irq_context: 0 proto_tab_lock irq_context: 0 random_ready_notifier.lock irq_context: 0 random_ready_notifier.lock crngs.lock irq_context: 0 misc_mtx misc_minors_ida.xa_lock irq_context: 0 misc_mtx &obj_hash[i].lock pool_lock irq_context: hardirq &rq->__lock &rt_b->rt_runtime_lock irq_context: hardirq &rq->__lock &rt_b->rt_runtime_lock tk_core.seq.seqcount irq_context: hardirq &rq->__lock &rt_b->rt_runtime_lock hrtimer_bases.lock irq_context: hardirq &rq->__lock &rt_b->rt_runtime_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 &rq->__lock &rt_rq->rt_runtime_lock irq_context: 0 wtd_deferred_reg_mutex irq_context: 0 &type->s_umount_key#12/1 irq_context: 0 &type->s_umount_key#12/1 fs_reclaim irq_context: 0 &type->s_umount_key#12/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 
0 &type->s_umount_key#12/1 pool_lock#2 irq_context: 0 &type->s_umount_key#12/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#12/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#12/1 shrinker_mutex irq_context: 0 &type->s_umount_key#12/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#12/1 sb_lock irq_context: 0 &type->s_umount_key#12/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#12/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#12/1 &sb->s_type->i_lock_key#11 irq_context: 0 &type->s_umount_key#12/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#12/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#12/1 &sb->s_type->i_lock_key#11 &dentry->d_lock irq_context: 0 &type->s_umount_key#12/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_lock_key#11 irq_context: 0 clocksource_mutex cpu_hotplug_lock irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex &stopper->lock irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex &stop_pi_lock irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex &stop_pi_lock &rq->__lock irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex &stop_pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex &rq->__lock irq_context: 0 clocksource_mutex cpu_hotplug_lock stop_cpus_mutex &x->wait#8 irq_context: 0 clocksource_mutex (console_sem).lock irq_context: 0 clocksource_mutex console_lock console_srcu console_owner_lock irq_context: 0 clocksource_mutex console_lock console_srcu console_owner irq_context: 0 clocksource_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 clocksource_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 &type->s_umount_key#13/1 irq_context: 0 &type->s_umount_key#13/1 fs_reclaim irq_context: 0 &type->s_umount_key#24/1 &x->wait#23 irq_context: 0 &type->s_umount_key#13/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &disk->open_mutex rcu_node_0 irq_context: 0 &type->s_umount_key#13/1 pool_lock#2 irq_context: 0 &disk->open_mutex &rcu_state.expedited_wq irq_context: 0 &type->s_umount_key#13/1 pcpu_alloc_mutex irq_context: 0 &disk->open_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &type->s_umount_key#13/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &disk->open_mutex &rq->__lock irq_context: 0 &disk->open_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#13/1 shrinker_mutex irq_context: 0 &disk->open_mutex bdev_lock irq_context: 0 &type->s_umount_key#13/1 list_lrus_mutex irq_context: 0 &disk->open_mutex bdev_lock &bdev->bd_holder_lock irq_context: 0 &type->s_umount_key#13/1 sb_lock irq_context: 0 &xa->xa_lock#5 irq_context: 0 &type->s_umount_key#13/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 sb_lock &obj_hash[i].lock irq_context: 0 sb_lock pool_lock#2 irq_context: 0 &type->s_umount_key#13/1 mmu_notifier_invalidate_range_start irq_context: 0 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#13/1 &____s->seqcount irq_context: 0 &type->s_umount_key#13/1 &sb->s_type->i_lock_key#12 irq_context: 0 &type->s_umount_key#25/1 irq_context: 0 &type->s_umount_key#13/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#13/1 tk_core.seq.seqcount 
irq_context: 0 &type->s_umount_key#25/1 fs_reclaim irq_context: 0 &type->s_umount_key#13/1 &sb->s_type->i_lock_key#12 &dentry->d_lock irq_context: 0 &type->s_umount_key#25/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#13/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#25/1 pool_lock#2 irq_context: 0 &type->s_umount_key#25/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#25/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#25/1 shrinker_mutex irq_context: 0 &type->s_umount_key#25/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#25/1 sb_lock irq_context: 0 &sb->s_type->i_mutex_key#5 irq_context: 0 &type->s_umount_key#25/1 inode_hash_lock irq_context: 0 &sb->s_type->i_mutex_key#5 &sb->s_type->i_lock_key#12 irq_context: 0 &type->s_umount_key#25/1 inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 &sb->s_type->i_mutex_key#5 rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#5 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#25/1 bdev_lock irq_context: 0 &sb->s_type->i_mutex_key#5 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#5 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#5 rcu_read_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#5 &dentry->d_lock &wq irq_context: 0 &sb->s_type->i_mutex_key#5 &____s->seqcount irq_context: 0 &type->s_umount_key#25/1 &disk->open_mutex irq_context: 0 &type->s_umount_key#25/1 &disk->open_mutex bdev_lock irq_context: 0 &sb->s_type->i_mutex_key#5 &c->lock irq_context: 0 &sb->s_type->i_mutex_key#5 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#25/1 &disk->open_mutex bdev_lock &bdev->bd_holder_lock irq_context: 0 &sb->s_type->i_mutex_key#5 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#25/1 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#5 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#25/1 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#5 &sb->s_type->i_lock_key#12 &dentry->d_lock irq_context: 0 &type->s_umount_key#25/1 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#25/1 lock#4 irq_context: 0 &type->s_umount_key#25/1 &mapping->i_private_lock irq_context: 0 &type->s_umount_key#25/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#25/1 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#15/1 irq_context: 0 &type->s_umount_key#15/1 fs_reclaim irq_context: 0 &type->s_umount_key#25/1 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->s_umount_key#15/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#25/1 bit_wait_table + i irq_context: 0 &type->s_umount_key#15/1 pool_lock#2 irq_context: 0 &type->s_umount_key#25/1 &rq->__lock irq_context: 0 &type->s_umount_key#15/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#25/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#15/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#25/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#15/1 shrinker_mutex irq_context: 0 &type->s_umount_key#25/1 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#25/1 &sb->s_type->i_lock_key#3 irq_context: 0 &type->s_umount_key#15/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#15/1 sb_lock irq_context: 0 &type->s_umount_key#15/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#25/1 &sb->s_type->i_lock_key#3 &xa->xa_lock#9 
irq_context: 0 &type->s_umount_key#15/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#25/1 lock#4 &lruvec->lru_lock irq_context: 0 &type->s_umount_key#15/1 &sb->s_type->i_lock_key#13 irq_context: 0 &type->s_umount_key#25/1 lock#5 irq_context: 0 &type->s_umount_key#15/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#25/1 &lruvec->lru_lock irq_context: 0 &type->s_umount_key#15/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#25/1 &c->lock irq_context: 0 &type->s_umount_key#15/1 &sb->s_type->i_lock_key#13 &dentry->d_lock irq_context: 0 &type->s_umount_key#25/1 crypto_alg_sem irq_context: 0 &type->s_umount_key#15/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#25/1 lock#2 irq_context: 0 &type->s_umount_key#25/1 lock#2 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#25/1 lock#2 rcu_read_lock &pool->lock irq_context: 0 &type->s_umount_key#25/1 lock#2 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#25/1 lock#2 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &type->s_umount_key#25/1 lock#2 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#25/1 lock#2 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#25/1 lock#2 &rq->__lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(work) irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(work) lock#4 irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(work) lock#4 &lruvec->lru_lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(work) lock#5 irq_context: 0 &type->s_umount_key#25/1 lock#2 (work_completion)(work) irq_context: 0 &type->s_umount_key#25/1 &x->wait#23 irq_context: 0 &type->s_umount_key#26/1 irq_context: 0 &type->s_umount_key#26/1 fs_reclaim irq_context: 0 &type->s_umount_key#26/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#16/1 irq_context: 0 &type->s_umount_key#26/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#16/1 fs_reclaim irq_context: 0 &type->s_umount_key#16/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 shrinker_mutex irq_context: 0 &type->s_umount_key#16/1 pool_lock#2 irq_context: 0 &type->s_umount_key#16/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#26/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#16/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#26/1 sb_lock irq_context: 0 &type->s_umount_key#26/1 inode_hash_lock irq_context: 0 &type->s_umount_key#26/1 inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 &type->s_umount_key#26/1 bdev_lock irq_context: 0 &type->s_umount_key#16/1 shrinker_mutex irq_context: 0 &type->s_umount_key#16/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#16/1 sb_lock irq_context: 0 &type->s_umount_key#16/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#16/1 &c->lock irq_context: 0 &type->s_umount_key#16/1 &____s->seqcount irq_context: 0 &type->s_umount_key#16/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 &disk->open_mutex irq_context: 0 &type->s_umount_key#16/1 &sb->s_type->i_lock_key#14 irq_context: 0 &type->s_umount_key#26/1 &disk->open_mutex bdev_lock irq_context: 0 &type->s_umount_key#16/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#16/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#16/1 
&sb->s_type->i_lock_key#14 &dentry->d_lock irq_context: 0 &type->s_umount_key#16/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#26/1 &disk->open_mutex bdev_lock &bdev->bd_holder_lock irq_context: 0 &type->s_umount_key#26/1 &c->lock irq_context: 0 &type->s_umount_key#26/1 &____s->seqcount irq_context: 0 &type->s_umount_key#26/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#26/1 lock#4 irq_context: 0 &type->s_umount_key#26/1 &mapping->i_private_lock irq_context: 0 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#17/1 irq_context: 0 &type->s_umount_key#17/1 fs_reclaim irq_context: 0 &type->s_umount_key#17/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#17/1 pool_lock#2 irq_context: 0 &type->s_umount_key#17/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#17/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#17/1 shrinker_mutex irq_context: 0 &type->s_umount_key#17/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#17/1 sb_lock irq_context: 0 &type->s_umount_key#17/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#17/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#17/1 &sb->s_type->i_lock_key#15 irq_context: 0 &type->s_umount_key#17/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#17/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#17/1 &sb->s_type->i_lock_key#15 &dentry->d_lock irq_context: 0 &type->s_umount_key#17/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_lock_key#15 irq_context: hardirq tick_broadcast_lock irq_context: hardirq tick_broadcast_lock jiffies_lock irq_context: 0 (wq_completion)events timer_update_work irq_context: 0 (wq_completion)events timer_update_work timer_keys_mutex irq_context: 0 (wq_completion)events timer_update_work timer_keys_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)events timer_update_work timer_keys_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 (wq_completion)events timer_update_work timer_keys_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 kclist_lock irq_context: 0 kclist_lock resource_lock irq_context: 0 kclist_lock fs_reclaim irq_context: 0 kclist_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kclist_lock pool_lock#2 irq_context: 0 kclist_lock &c->lock irq_context: 0 kclist_lock &____s->seqcount irq_context: 0 &type->s_umount_key#18/1 irq_context: 0 &type->s_umount_key#18/1 fs_reclaim irq_context: 0 &type->s_umount_key#18/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#18/1 pool_lock#2 irq_context: 0 &type->s_umount_key#18/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#18/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#18/1 shrinker_mutex irq_context: 0 &type->s_umount_key#18/1 &c->lock irq_context: 0 &type->s_umount_key#18/1 &____s->seqcount irq_context: 0 &type->s_umount_key#18/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#18/1 sb_lock irq_context: 0 &type->s_umount_key#18/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#18/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#18/1 &sb->s_type->i_lock_key#16 irq_context: 0 &type->s_umount_key#18/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#18/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#18/1 &sb->s_type->i_lock_key#16 &dentry->d_lock irq_context: 0 &type->s_umount_key#18/1 &dentry->d_lock irq_context: 0 
misc_mtx &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#26/1 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#2 &c->lock irq_context: 0 &sb->s_type->i_mutex_key#2 &____s->seqcount irq_context: 0 tomoyo_ss irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 tomoyo_ss pool_lock#2 irq_context: 0 tomoyo_ss tomoyo_policy_lock irq_context: 0 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 tomoyo_ss (console_sem).lock irq_context: 0 tomoyo_ss console_lock console_srcu console_owner_lock irq_context: 0 tomoyo_ss console_lock console_srcu console_owner irq_context: 0 tomoyo_ss console_lock console_srcu console_owner &port_lock_key irq_context: 0 tomoyo_ss console_lock console_srcu console_owner console_owner_lock irq_context: hardirq &rt_b->rt_runtime_lock irq_context: hardirq &rt_b->rt_runtime_lock tk_core.seq.seqcount irq_context: hardirq &rt_rq->rt_runtime_lock irq_context: 0 pnp_lock irq_context: 0 pnp_lock fs_reclaim irq_context: 0 pnp_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pnp_lock pool_lock#2 irq_context: 0 &device->physical_node_lock sysfs_symlink_target_lock irq_context: 0 &device->physical_node_lock fs_reclaim irq_context: 0 &device->physical_node_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &device->physical_node_lock pool_lock#2 irq_context: 0 &device->physical_node_lock lock irq_context: 0 &device->physical_node_lock lock kernfs_idr_lock irq_context: 0 &device->physical_node_lock &root->kernfs_rwsem irq_context: 0 &device->physical_node_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 fwnode_link_lock irq_context: 0 fwnode_link_lock &k->k_lock irq_context: 0 &dev->mutex device_links_srcu irq_context: 0 &dev->mutex fwnode_link_lock irq_context: 0 &dev->mutex device_links_lock irq_context: 0 &dev->mutex fs_reclaim irq_context: 0 &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex pool_lock#2 irq_context: 0 &dev->mutex &dev->devres_lock irq_context: 0 &dev->mutex pinctrl_list_mutex irq_context: 0 &dev->mutex pinctrl_maps_mutex irq_context: 0 &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 &dev->mutex &obj_hash[i].lock irq_context: 0 &dev->mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex lock irq_context: 0 &dev->mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex pnp_lock irq_context: 0 &dev->mutex resource_lock irq_context: 0 &dev->mutex (console_sem).lock irq_context: 0 &dev->mutex console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex console_lock console_srcu console_owner irq_context: 0 &dev->mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 &type->s_umount_key#26/1 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &dev->mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex fwnode_link_lock &k->k_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &dev->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 &dev->mutex 
deferred_probe_mutex irq_context: 0 &dev->mutex uevent_sock_mutex irq_context: 0 &dev->mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex probe_waitqueue.lock irq_context: 0 subsys mutex#22 irq_context: 0 &type->s_umount_key#26/1 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->s_umount_key#26/1 bit_wait_table + i irq_context: 0 &type->s_umount_key#26/1 &rq->__lock irq_context: 0 &type->s_umount_key#26/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#26/1 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_lock_key#3 irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_lock_key#3 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#26/1 lock#4 &lruvec->lru_lock irq_context: 0 &type->s_umount_key#26/1 lock#5 irq_context: 0 &type->s_umount_key#26/1 &lruvec->lru_lock irq_context: 0 &type->s_umount_key#26/1 crypto_alg_sem irq_context: 0 &type->s_umount_key#26/1 pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 percpu_counters_lock irq_context: 0 &type->s_umount_key#26/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#26/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#26/1 inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 &type->s_umount_key#26/1 inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#26/1 rcu_read_lock pool_lock#2 irq_context: 0 jiffies_seq.seqcount irq_context: 0 subsys mutex#23 irq_context: 0 subsys mutex#23 &k->k_lock irq_context: 0 subsys mutex#16 irq_context: 0 subsys mutex#16 &k->k_lock irq_context: 0 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 fill_pool_map-wait-type-override pool_lock irq_context: 0 subsys mutex#24 irq_context: 0 subsys mutex#24 &k->k_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &pcp->lock &zone->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 fill_pool_map-wait-type-override &c->lock irq_context: softirq rcu_callback pcpu_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &meta->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 kfence_freelist_lock irq_context: 0 hrtimer_bases.lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 hrtimer_bases.lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 hrtimer_bases.lock fill_pool_map-wait-type-override &c->lock irq_context: 0 hrtimer_bases.lock fill_pool_map-wait-type-override pool_lock irq_context: softirq led_lock irq_context: 0 misc_mtx lock kernfs_idr_lock pool_lock#2 irq_context: 0 subsys mutex#25 irq_context: 0 subsys mutex#25 &k->list_lock irq_context: 0 subsys mutex#25 &k->k_lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &xa->xa_lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex kthread_create_lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &p->pi_lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &x->wait irq_context: 0 cpu_hotplug_lock wq_pool_mutex &rq->__lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock wq_pool_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex &p->pi_lock &rq->__lock irq_context: 0 cpu_hotplug_lock wq_pool_mutex wq_pool_attach_mutex irq_context: 0 cpu_hotplug_lock wq_pool_mutex &pool->lock 
&p->pi_lock irq_context: 0 netevent_notif_chain.lock irq_context: 0 clients_rwsem irq_context: 0 clients_rwsem fs_reclaim irq_context: 0 clients_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 clients_rwsem clients.xa_lock irq_context: 0 devices_rwsem irq_context: 0 clients_rwsem clients.xa_lock pool_lock#2 irq_context: 0 hrtimer_bases.lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 hrtimer_bases.lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 hrtimer_bases.lock fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 (blocking_lsm_notifier_chain).rwsem irq_context: 0 pernet_ops_rwsem &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (inetaddr_chain).rwsem irq_context: 0 inet6addr_chain.lock irq_context: 0 buses_mutex irq_context: 0 offload_lock irq_context: 0 inetsw_lock irq_context: 0 (wq_completion)events pcpu_balance_work irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex fs_reclaim irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex pool_lock#2 irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex free_vmap_area_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex vmap_area_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &c->lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &____s->seqcount irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex init_mm.page_table_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &obj_hash[i].lock irq_context: 0 ptype_lock irq_context: 0 (wq_completion)events_power_efficient irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &tbl->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &tbl->lock &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &tbl->lock &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nl_table_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex batched_entropy_u32.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &tbl->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex sysctl_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work irq_context: 0 pernet_ops_rwsem &net->rules_mod_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &base->lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem slab_mutex irq_context: 0 pernet_ops_rwsem slab_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem slab_mutex 
pool_lock#2 irq_context: 0 pernet_ops_rwsem slab_mutex pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem batched_entropy_u32.lock irq_context: 0 pernet_ops_rwsem &obj_hash[i].lock pool_lock irq_context: 0 tcp_ulp_list_lock irq_context: 0 xfrm_state_afinfo_lock irq_context: 0 xfrm_policy_afinfo_lock irq_context: 0 xfrm_input_afinfo_lock irq_context: 0 pernet_ops_rwsem percpu_counters_lock irq_context: 0 rtnl_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex krc.lock irq_context: 0 rtnl_mutex krc.lock &obj_hash[i].lock irq_context: 0 rtnl_mutex krc.lock hrtimer_bases.lock irq_context: 0 rtnl_mutex krc.lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 rtnl_mutex krc.lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 rtnl_mutex krc.lock &base->lock irq_context: 0 rtnl_mutex krc.lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem k-slock-AF_INET/1 irq_context: 0 (wq_completion)events_highpri irq_context: 0 (wq_completion)events_highpri (work_completion)(&(&krcp->page_cache_work)->work) irq_context: 0 (wq_completion)events_highpri (work_completion)(&(&krcp->page_cache_work)->work) fs_reclaim irq_context: 0 (wq_completion)events_highpri (work_completion)(&(&krcp->page_cache_work)->work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_highpri (work_completion)(&(&krcp->page_cache_work)->work) &____s->seqcount irq_context: 0 (wq_completion)events_highpri (work_completion)(&(&krcp->page_cache_work)->work) krc.lock irq_context: 0 &hashinfo->lock irq_context: 0 k-slock-AF_INET/1 irq_context: 0 rcu_read_lock &c->lock irq_context: 0 rcu_read_lock &____s->seqcount irq_context: 0 tcp_cong_list_lock irq_context: 0 mptcp_sched_list_lock irq_context: 0 pernet_ops_rwsem cache_list_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) cache_list_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &base->lock &obj_hash[i].lock irq_context: 0 (rpc_pipefs_notifier_list).rwsem irq_context: 0 svc_xprt_class_lock irq_context: 0 xprt_list_lock irq_context: 0 xprt_list_lock (console_sem).lock irq_context: 0 xprt_list_lock console_lock console_srcu console_owner_lock irq_context: 0 xprt_list_lock console_lock console_srcu console_owner irq_context: 0 xprt_list_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 xprt_list_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 umhelper_sem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) fs_reclaim irq_context: 0 umhelper_sem usermodehelper_disabled_waitq.lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&entry->work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) rcu_read_lock init_fs.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_mutex_key &dentry->d_lock &wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 mount_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 
(wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_log_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_log_wait.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_lock_key#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 &s->s_inode_list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_lock_key#2 &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss pool_lock#2 
irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss tomoyo_log_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss tomoyo_log_wait.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss tomoyo_policy_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key &sb->s_type->i_lock_key#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key &wb->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key &wb->list_lock &sb->s_type->i_lock_key#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &c->lock irq_context: 0 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &sb->s_type->i_lock_key#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &sighand->siglock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) free_vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) init_mm.page_table_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) batched_entropy_u64.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&sub_info->work) init_files.file_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) init_fs.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) init_fs.lock &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &p->alloc_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &sighand->siglock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) pidmap_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem css_set_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem tasklist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem tasklist_lock &sighand->siglock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem css_set_lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) input_pool.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &drv->dynids.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) sb_writers#2 &sb->s_type->i_mutex_key tomoyo_ss &rq->__lock irq_context: 0 umh_sysctl_lock irq_context: 0 async_done.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) quarantine_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_done.lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_done.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&entry->work) async_done.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_done.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) async_done.lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key irq_context: 0 &sb->s_type->i_mutex_key fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key rcu_read_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key &dentry->d_lock &wq irq_context: 0 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &dentry->d_lock pool_lock#2 irq_context: 0 &tsk->futex_exit_mutex irq_context: 0 &tsk->futex_exit_mutex &p->pi_lock irq_context: 0 &p->alloc_lock &fs->lock irq_context: 0 &child->perf_event_mutex irq_context: 0 css_set_lock irq_context: 0 tasklist_lock irq_context: 0 tasklist_lock &pid->wait_pidfd irq_context: 0 tasklist_lock &sighand->siglock irq_context: 0 tasklist_lock &sighand->siglock &sig->wait_chldexit irq_context: 0 tasklist_lock &sighand->siglock input_pool.lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 &obj_hash[i].lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pool_lock#2 irq_context: 0 tasklist_lock &obj_hash[i].lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &obj_hash[i].lock irq_context: 0 &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &obj_hash[i].lock pool_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex pool_lock#2 irq_context: 0 misc_mtx &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 misc_mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 subsys mutex#26 irq_context: 0 subsys mutex#27 irq_context: 0 subsys mutex#27 &k->list_lock irq_context: 0 subsys mutex#27 &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &obj_hash[i].lock pool_lock irq_context: 0 &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock pool_lock#2 irq_context: 0 subsys mutex#28 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) batched_entropy_u64.lock crngs.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &pcp->lock &zone->lock &____s->seqcount irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 
pidmap_lock &obj_hash[i].lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock pool_lock#2 irq_context: 0 pmus_lock fs_reclaim irq_context: 0 pmus_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pmus_lock &k->list_lock irq_context: 0 pmus_lock lock irq_context: 0 pmus_lock lock kernfs_idr_lock irq_context: 0 pmus_lock &root->kernfs_rwsem irq_context: 0 pmus_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 pmus_lock uevent_sock_mutex irq_context: 0 pmus_lock rcu_read_lock &pool->lock irq_context: 0 pmus_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pmus_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 pmus_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pmus_lock running_helpers_waitq.lock irq_context: 0 pmus_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 pmus_lock &c->lock irq_context: 0 pmus_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pmus_lock &x->wait#9 irq_context: 0 pmus_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pmus_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 pmus_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 pmus_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 pmus_lock bus_type_sem irq_context: 0 pmus_lock sysfs_symlink_target_lock irq_context: 0 pmus_lock &k->k_lock irq_context: 0 pmus_lock &root->kernfs_rwsem irq_context: 0 pmus_lock &dev->power.lock irq_context: 0 pmus_lock dpm_list_mtx irq_context: 0 pmus_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pmus_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pmus_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pmus_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pmus_lock &dev->mutex &k->list_lock irq_context: 0 pmus_lock &dev->mutex &k->k_lock irq_context: 0 pmus_lock &dev->mutex &dev->power.lock irq_context: 0 pmus_lock subsys mutex#29 irq_context: 0 key_user_lock irq_context: 0 key_serial_lock irq_context: 0 key_construction_mutex irq_context: 0 &type->lock_class irq_context: 0 &type->lock_class keyring_serialise_link_lock irq_context: 0 &type->lock_class keyring_serialise_link_lock fs_reclaim irq_context: 0 &type->lock_class keyring_serialise_link_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->lock_class keyring_serialise_link_lock pool_lock#2 irq_context: 0 &type->lock_class keyring_serialise_link_lock &obj_hash[i].lock irq_context: 0 keyring_serialise_link_lock irq_context: 0 &pgdat->kswapd_lock fs_reclaim irq_context: 0 &pgdat->kswapd_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &pgdat->kswapd_lock pool_lock#2 irq_context: 0 &pgdat->kswapd_lock kthread_create_lock irq_context: 0 &pgdat->kswapd_lock &p->pi_lock irq_context: 0 &pgdat->kswapd_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &pgdat->kswapd_lock &x->wait irq_context: 0 &pgdat->kswapd_lock &rq->__lock irq_context: 0 &pgdat->kswapd_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pgdat->kswapd_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key &c->lock irq_context: 
0 &sb->s_type->i_mutex_key &____s->seqcount irq_context: 0 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 &pgdat->kswapd_lock &cfs_rq->removed.lock irq_context: 0 &pgdat->kswapd_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &pgdat->kswapd_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &pgdat->kswapd_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &pgdat->kswapd_lock &obj_hash[i].lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override pool_lock irq_context: 0 &pgdat->kswapd_lock &p->pi_lock &rq->__lock irq_context: 0 &pgdat->kswapd_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pgdat->kswapd_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 list_lrus_mutex irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex fs_reclaim irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &c->lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &____s->seqcount irq_context: 0 drivers_lock irq_context: 0 damon_dbgfs_lock irq_context: 0 damon_dbgfs_lock fs_reclaim irq_context: 0 damon_dbgfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 damon_dbgfs_lock pool_lock#2 irq_context: 0 damon_dbgfs_lock damon_ops_lock irq_context: 0 damon_dbgfs_lock pin_fs_lock irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 damon_dbgfs_lock &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 &pgdat->kswapd_wait irq_context: 0 misc_mtx rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#19/1 irq_context: 0 &type->s_umount_key#19/1 fs_reclaim irq_context: 0 &type->s_umount_key#19/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#19/1 pool_lock#2 irq_context: 0 &type->s_umount_key#19/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#19/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#19/1 shrinker_mutex irq_context: 0 &type->s_umount_key#19/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#19/1 sb_lock irq_context: 0 &type->s_umount_key#19/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 
&type->s_umount_key#19/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#19/1 &sb->s_type->i_lock_key#17 irq_context: 0 &type->s_umount_key#19/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#19/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#19/1 &sb->s_type->i_lock_key#17 &dentry->d_lock irq_context: 0 &type->s_umount_key#19/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#20/1 irq_context: 0 &type->s_umount_key#20/1 fs_reclaim irq_context: 0 &type->s_umount_key#20/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#20/1 pool_lock#2 irq_context: 0 &type->s_umount_key#20/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#20/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#20/1 shrinker_mutex irq_context: 0 &type->s_umount_key#20/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#20/1 sb_lock irq_context: 0 &type->s_umount_key#20/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#20/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#20/1 &sb->s_type->i_lock_key#18 irq_context: 0 &type->s_umount_key#20/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#20/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#20/1 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &type->s_umount_key#20/1 &dentry->d_lock irq_context: 0 configfs_subsystem_mutex irq_context: 0 &sb->s_type->i_mutex_key#6/1 irq_context: 0 &sb->s_type->i_mutex_key#6/1 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &c->lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 configfs_dirent_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &sb->s_type->i_lock_key#18 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 irq_context: 0 misc_mtx lock kernfs_idr_lock &c->lock irq_context: 0 misc_mtx lock kernfs_idr_lock &____s->seqcount irq_context: 0 misc_mtx rcu_read_lock &rq->__lock irq_context: 0 misc_mtx rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 slab_mutex rcu_read_lock &pool->lock irq_context: 0 slab_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 slab_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 slab_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 slab_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 slab_mutex &rq->__lock irq_context: 0 ecryptfs_daemon_hash_mux irq_context: 0 ecryptfs_daemon_hash_mux fs_reclaim irq_context: 0 ecryptfs_daemon_hash_mux fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ecryptfs_kthread_ctl.wait irq_context: 0 ecryptfs_daemon_hash_mux pool_lock#2 irq_context: 0 ecryptfs_msg_ctx_lists_mux irq_context: 0 ecryptfs_msg_ctx_lists_mux &ecryptfs_msg_ctx_arr[i].mux 
irq_context: 0 misc_mtx rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 misc_mtx rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem tk_core.seq.seqcount irq_context: 0 pernet_ops_rwsem &k->list_lock irq_context: 0 pernet_ops_rwsem lock irq_context: 0 pernet_ops_rwsem lock kernfs_idr_lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 pernet_ops_rwsem running_helpers_waitq.lock irq_context: 0 nfs_version_lock irq_context: 0 key_types_sem irq_context: 0 key_types_sem (console_sem).lock irq_context: 0 key_types_sem console_lock console_srcu console_owner_lock irq_context: 0 key_types_sem console_lock console_srcu console_owner irq_context: 0 key_types_sem console_lock console_srcu console_owner &port_lock_key irq_context: 0 key_types_sem console_lock console_srcu console_owner console_owner_lock irq_context: 0 pnfs_spinlock irq_context: 0 pernet_ops_rwsem &sn->pipefs_sb_lock irq_context: 0 pernet_ops_rwsem krc.lock irq_context: 0 pernet_ops_rwsem krc.lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem krc.lock hrtimer_bases.lock irq_context: 0 pernet_ops_rwsem krc.lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 pernet_ops_rwsem krc.lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem krc.lock &base->lock irq_context: 0 pernet_ops_rwsem krc.lock &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem &rq->__lock irq_context: 0 pernet_ops_rwsem &s->s_inode_list_lock irq_context: 0 nls_lock irq_context: softirq rcu_callback put_task_map-wait-type-override &obj_hash[i].lock irq_context: softirq rcu_callback put_task_map-wait-type-override pool_lock#2 irq_context: softirq &(&cache_cleaner)->timer irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 jffs2_compressor_list_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &c->lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq rcu_callback &obj_hash[i].lock pool_lock irq_context: softirq rcu_callback put_task_map-wait-type-override &obj_hash[i].lock pool_lock irq_context: 0 next_tag_value_lock irq_context: softirq (&tcp_orphan_timer) irq_context: softirq (&tcp_orphan_timer) &obj_hash[i].lock irq_context: softirq (&tcp_orphan_timer) &base->lock irq_context: softirq (&tcp_orphan_timer) &base->lock &obj_hash[i].lock irq_context: 0 log_redrive_lock irq_context: 0 &TxAnchor.LazyLock irq_context: 0 &TxAnchor.LazyLock jfs_commit_thread_wait.lock irq_context: 0 jfsTxnLock irq_context: 0 ocfs2_stack_lock irq_context: 0 ocfs2_stack_lock (console_sem).lock irq_context: 0 ocfs2_stack_lock console_lock console_srcu console_owner_lock irq_context: 0 ocfs2_stack_lock console_lock console_srcu console_owner irq_context: 0 
ocfs2_stack_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 ocfs2_stack_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 misc_mtx &pcp->lock &zone->lock irq_context: 0 misc_mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx &p->pi_lock &cfs_rq->removed.lock irq_context: 0 o2hb_callback_sem irq_context: 0 o2net_handler_lock irq_context: 0 subsys mutex#30 irq_context: 0 subsys mutex#30 &k->k_lock irq_context: 0 &type->s_umount_key#21/1 irq_context: 0 &type->s_umount_key#21/1 fs_reclaim irq_context: 0 &type->s_umount_key#21/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#21/1 pool_lock#2 irq_context: 0 &type->s_umount_key#21/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#21/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#21/1 shrinker_mutex irq_context: 0 &type->s_umount_key#21/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#21/1 sb_lock irq_context: 0 &type->s_umount_key#21/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#21/1 &____s->seqcount irq_context: 0 &type->s_umount_key#21/1 &c->lock irq_context: 0 &type->s_umount_key#21/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#21/1 &sb->s_type->i_lock_key#19 irq_context: 0 &type->s_umount_key#21/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#21/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#21/1 &sb->s_type->i_lock_key#19 &dentry->d_lock irq_context: 0 &type->s_umount_key#21/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#22/1 irq_context: 0 &type->s_umount_key#22/1 fs_reclaim irq_context: 0 &type->s_umount_key#22/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#22/1 pool_lock#2 irq_context: 0 &type->s_umount_key#22/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#22/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#22/1 shrinker_mutex irq_context: 0 &type->s_umount_key#22/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#22/1 sb_lock irq_context: 0 &type->s_umount_key#22/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#22/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#22/1 &sb->s_type->i_lock_key#20 irq_context: 0 &type->s_umount_key#22/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#22/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#22/1 &sb->s_type->i_lock_key#20 &dentry->d_lock irq_context: 0 &type->s_umount_key#22/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#22/1 &c->lock irq_context: 0 &type->s_umount_key#22/1 &____s->seqcount irq_context: 0 cipso_v4_doi_list_lock irq_context: 0 pernet_ops_rwsem nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock jump_label_mutex irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 crypto_alg_sem irq_context: 0 alg_types_sem irq_context: 0 alg_types_sem fs_reclaim irq_context: 0 alg_types_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 alg_types_sem pool_lock#2 irq_context: 0 dma_list_mutex irq_context: 0 asymmetric_key_parsers_sem irq_context: 0 asymmetric_key_parsers_sem (console_sem).lock irq_context: 0 asymmetric_key_parsers_sem console_lock 
console_srcu console_owner_lock irq_context: 0 asymmetric_key_parsers_sem console_lock console_srcu console_owner irq_context: 0 asymmetric_key_parsers_sem console_lock console_srcu console_owner &port_lock_key irq_context: 0 asymmetric_key_parsers_sem console_lock console_srcu console_owner console_owner_lock irq_context: 0 blkcg_pol_register_mutex irq_context: 0 blkcg_pol_register_mutex blkcg_pol_mutex irq_context: 0 blkcg_pol_register_mutex cgroup_mutex irq_context: 0 blkcg_pol_register_mutex cgroup_mutex &root->kernfs_rwsem irq_context: 0 blkcg_pol_register_mutex blkcg_pol_mutex fs_reclaim irq_context: 0 blkcg_pol_register_mutex blkcg_pol_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 blkcg_pol_register_mutex blkcg_pol_mutex pool_lock#2 irq_context: 0 blkcg_pol_register_mutex cgroup_mutex fs_reclaim irq_context: 0 blkcg_pol_register_mutex cgroup_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 blkcg_pol_register_mutex cgroup_mutex pool_lock#2 irq_context: 0 blkcg_pol_register_mutex cgroup_mutex lock irq_context: 0 blkcg_pol_register_mutex cgroup_mutex lock kernfs_idr_lock irq_context: 0 blkcg_pol_register_mutex cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 elv_list_lock irq_context: 0 crc_t10dif_mutex irq_context: 0 crc_t10dif_mutex crypto_alg_sem irq_context: 0 crc_t10dif_mutex fs_reclaim irq_context: 0 crc_t10dif_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 crc_t10dif_mutex pool_lock#2 irq_context: 0 crc64_rocksoft_mutex irq_context: 0 crc64_rocksoft_mutex crypto_alg_sem irq_context: 0 crc64_rocksoft_mutex fs_reclaim irq_context: 0 crc64_rocksoft_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 crc64_rocksoft_mutex pool_lock#2 irq_context: 0 ts_mod_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &cfs_rq->removed.lock 
irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 pci_ep_cfs_subsys.su_mutex irq_context: 0 &default_group_class[depth - 1]#2/1 irq_context: 0 &default_group_class[depth - 1]#2/1 fs_reclaim irq_context: 0 &default_group_class[depth - 1]#2/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &default_group_class[depth - 1]#2/1 pool_lock#2 irq_context: 0 &default_group_class[depth - 1]#2/1 &dentry->d_lock irq_context: 0 &default_group_class[depth - 1]#2/1 configfs_dirent_lock irq_context: 0 &default_group_class[depth - 1]#2/1 mmu_notifier_invalidate_range_start irq_context: 0 &default_group_class[depth - 1]#2/1 &sb->s_type->i_lock_key#18 irq_context: 0 &default_group_class[depth - 1]#2/1 &s->s_inode_list_lock irq_context: 0 &default_group_class[depth - 1]#2/1 tk_core.seq.seqcount irq_context: 0 &default_group_class[depth - 1]#2/1 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &default_group_class[depth - 1]#2/1 &sb->s_type->i_mutex_key#7/2 irq_context: 0 &x->wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &sb->s_type->i_mutex_key#7/2 irq_context: 0 pci_epf_mutex irq_context: 0 ipmi_interfaces_mutex irq_context: 0 ipmi_interfaces_mutex &k->list_lock irq_context: 0 ipmi_interfaces_mutex fs_reclaim irq_context: 0 ipmi_interfaces_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 ipmi_interfaces_mutex pool_lock#2 irq_context: 0 ipmi_interfaces_mutex lock irq_context: 0 ipmi_interfaces_mutex lock kernfs_idr_lock irq_context: 0 ipmi_interfaces_mutex &root->kernfs_rwsem irq_context: 0 ipmi_interfaces_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 ipmi_interfaces_mutex &k->k_lock irq_context: 0 ipmi_interfaces_mutex uevent_sock_mutex irq_context: 0 ipmi_interfaces_mutex &obj_hash[i].lock irq_context: 0 ipmi_interfaces_mutex rcu_read_lock &pool->lock irq_context: 0 ipmi_interfaces_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 ipmi_interfaces_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 ipmi_interfaces_mutex running_helpers_waitq.lock irq_context: 0 ipmi_interfaces_mutex pcpu_alloc_mutex irq_context: 0 ipmi_interfaces_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 ipmi_interfaces_mutex &obj_hash[i].lock pool_lock irq_context: 0 ipmi_interfaces_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 ipmi_interfaces_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 ipmi_interfaces_mutex 
cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 ipmi_interfaces_mutex cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 ipmi_interfaces_mutex kthread_create_lock irq_context: 0 ipmi_interfaces_mutex &p->pi_lock irq_context: 0 ipmi_interfaces_mutex &p->pi_lock &rq->__lock irq_context: 0 ipmi_interfaces_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ipmi_interfaces_mutex &x->wait irq_context: 0 ipmi_interfaces_mutex &rq->__lock irq_context: 0 ipmi_interfaces_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback rcu_read_lock &pool->lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&p->wq) irq_context: 0 (wq_completion)events (work_completion)(&p->wq) vmap_area_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) purge_vmap_area_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) pool_lock#2 irq_context: 0 ipmi_interfaces_mutex &cfs_rq->removed.lock irq_context: 0 ipmi_interfaces_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 ipmi_interfaces_mutex wq_pool_mutex irq_context: 0 ipmi_interfaces_mutex wq_pool_mutex &wq->mutex irq_context: 0 ipmi_interfaces_mutex &base->lock irq_context: 0 ipmi_interfaces_mutex &base->lock &obj_hash[i].lock irq_context: 0 ipmi_interfaces_mutex panic_notifier_list.lock irq_context: 0 smi_watchers_mutex irq_context: 0 smi_watchers_mutex &ipmi_interfaces_srcu irq_context: 0 smi_infos_lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex &device->physical_node_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock semaphore->lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock fs_reclaim irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock pool_lock#2 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock fs_reclaim irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock pool_lock#2 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock wakeup_ida.xa_lock 
irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &x->wait#9 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &obj_hash[i].lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &k->list_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &k->list_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex fs_reclaim irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex pool_lock#2 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock kernfs_idr_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock bus_type_sem irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock sysfs_symlink_target_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &____s->seqcount irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &c->lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock uevent_sock_mutex irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock rcu_read_lock &pool->lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock running_helpers_waitq.lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock &k->k_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock subsys mutex#13 irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock subsys mutex#13 &k->k_lock irq_context: 0 &dev->mutex acpi_pm_notifier_install_lock acpi_pm_notifier_lock events_lock irq_context: 0 &dev->mutex acpi_wakeup_lock irq_context: 0 &dev->mutex semaphore->lock irq_context: 0 &dev->mutex *(&acpi_gbl_reference_count_lock) irq_context: 0 &dev->mutex irq_domain_mutex irq_context: 0 &dev->mutex &domain->mutex irq_context: 0 &dev->mutex &c->lock irq_context: 0 &dev->mutex &____s->seqcount irq_context: 0 &dev->mutex kthread_create_lock irq_context: 0 &dev->mutex &p->pi_lock irq_context: 0 &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 
&dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &x->wait irq_context: 0 &dev->mutex &rq->__lock irq_context: 0 &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key &rq->__lock irq_context: softirq rcu_callback put_task_map-wait-type-override quarantine_lock irq_context: 0 &dev->mutex &cfs_rq->removed.lock irq_context: 0 &dev->mutex &desc->request_mutex irq_context: 0 &dev->mutex &desc->request_mutex &irq_desc_lock_class irq_context: 0 &dev->mutex &desc->request_mutex &irq_desc_lock_class irq_controller_lock irq_context: 0 &dev->mutex &desc->request_mutex &irq_desc_lock_class mask_lock irq_context: 0 &dev->mutex &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock irq_context: 0 &dev->mutex &desc->wait_for_threads irq_context: 0 &desc->wait_for_threads irq_context: 0 &desc->wait_for_threads &p->pi_lock irq_context: 0 &desc->wait_for_threads &p->pi_lock &rq->__lock irq_context: 0 &desc->wait_for_threads &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->pi_lock &rq->__lock &rt_b->rt_runtime_lock irq_context: 0 &p->pi_lock &rq->__lock &rt_b->rt_runtime_lock tk_core.seq.seqcount irq_context: 0 &p->pi_lock &rq->__lock &rt_b->rt_runtime_lock hrtimer_bases.lock irq_context: 0 &p->pi_lock &rq->__lock &rt_b->rt_runtime_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 &p->pi_lock &rq->__lock &rt_rq->rt_runtime_lock irq_context: 0 &x->wait#7 irq_context: 0 &sb->s_type->i_mutex_key &rq->__lock &cfs_rq->removed.lock irq_context: 0 tasklist_lock quarantine_lock irq_context: 0 &dev->mutex register_lock irq_context: 0 &dev->mutex register_lock proc_subdir_lock irq_context: 0 &dev->mutex register_lock fs_reclaim irq_context: 0 &dev->mutex register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex register_lock pool_lock#2 irq_context: 0 &dev->mutex register_lock proc_inum_ida.xa_lock irq_context: 0 &dev->mutex register_lock proc_subdir_lock irq_context: 0 &dev->mutex &irq_desc_lock_class irq_context: 0 &dev->mutex proc_subdir_lock irq_context: 0 &dev->mutex proc_inum_ida.xa_lock irq_context: 0 &dev->mutex proc_subdir_lock irq_context: 0 &dev->mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex &x->wait#9 irq_context: 0 &dev->mutex gdp_mutex irq_context: 0 &dev->mutex gdp_mutex &k->list_lock irq_context: 0 &dev->mutex gdp_mutex fs_reclaim irq_context: 0 &dev->mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex gdp_mutex pool_lock#2 irq_context: 0 &dev->mutex gdp_mutex lock irq_context: 0 &dev->mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex bus_type_sem irq_context: 0 &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex dpm_list_mtx irq_context: 0 &dev->mutex subsys mutex#31 irq_context: 0 &dev->mutex subsys mutex#31 &k->k_lock irq_context: 0 &dev->mutex 
input_mutex irq_context: 0 &dev->mutex input_mutex fs_reclaim irq_context: 0 &dev->mutex input_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex input_mutex pool_lock#2 irq_context: 0 &dev->mutex input_mutex &dev->mutex#2 irq_context: 0 &dev->mutex input_mutex input_devices_poll_wait.lock irq_context: 0 &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex wakeup_ida.xa_lock irq_context: 0 &dev->mutex &pcp->lock &zone->lock irq_context: 0 &dev->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex subsys mutex#13 irq_context: 0 &dev->mutex subsys mutex#13 &k->k_lock irq_context: 0 &dev->mutex events_lock irq_context: 0 rcu_read_lock rcu_node_0 irq_context: 0 &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &rq->__lock irq_context: 0 register_count_mutex irq_context: 0 register_count_mutex &k->list_lock irq_context: 0 register_count_mutex fs_reclaim irq_context: 0 register_count_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 register_count_mutex pool_lock#2 irq_context: 0 register_count_mutex &c->lock irq_context: 0 register_count_mutex &____s->seqcount irq_context: 0 register_count_mutex lock irq_context: 0 register_count_mutex lock kernfs_idr_lock irq_context: 0 register_count_mutex &root->kernfs_rwsem irq_context: 0 register_count_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 register_count_mutex &k->k_lock irq_context: 0 register_count_mutex uevent_sock_mutex irq_context: 0 register_count_mutex &obj_hash[i].lock irq_context: 0 register_count_mutex rcu_read_lock &pool->lock irq_context: 0 register_count_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 register_count_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 register_count_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 register_count_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 register_count_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 register_count_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex cpu_add_remove_lock irq_context: 0 &dev->mutex thermal_cdev_ida.xa_lock irq_context: 0 &dev->mutex cpufreq_driver_lock irq_context: 0 &dev->mutex subsys mutex#32 irq_context: 0 &dev->mutex subsys mutex#32 &k->k_lock irq_context: 0 &dev->mutex thermal_list_lock irq_context: 0 &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &k->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) uevent_sock_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) rcu_read_lock 
&pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) running_helpers_waitq.lock irq_context: 0 scmi_requested_devices_mtx irq_context: 0 scmi_requested_devices_mtx fs_reclaim irq_context: 0 scmi_requested_devices_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 scmi_requested_devices_mtx pool_lock#2 irq_context: 0 scmi_requested_devices_mtx &c->lock irq_context: 0 scmi_requested_devices_mtx &pcp->lock &zone->lock irq_context: 0 scmi_requested_devices_mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 scmi_requested_devices_mtx &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) quarantine_lock irq_context: 0 &sb->s_type->i_mutex_key rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &sb->s_type->i_mutex_key &dentry->d_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key &dentry->d_lock pool_lock#2 irq_context: softirq rcu_callback quarantine_lock irq_context: softirq &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &obj_hash[i].lock pool_lock irq_context: 0 &sb->s_type->i_mutex_key &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex iommu_probe_device_lock irq_context: 0 &dev->mutex acpi_link_lock irq_context: 0 &dev->mutex acpi_link_lock fs_reclaim irq_context: 0 &dev->mutex acpi_link_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex acpi_link_lock &____s->seqcount irq_context: 0 &dev->mutex acpi_link_lock pool_lock#2 irq_context: 0 &dev->mutex acpi_link_lock semaphore->lock irq_context: 0 &dev->mutex acpi_link_lock &obj_hash[i].lock irq_context: 0 &dev->mutex acpi_link_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 &dev->mutex acpi_link_lock &c->lock irq_context: 0 &dev->mutex acpi_link_lock &rq->__lock irq_context: 0 &dev->mutex acpi_link_lock (console_sem).lock irq_context: 0 &dev->mutex acpi_link_lock console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex acpi_link_lock console_lock console_srcu console_owner irq_context: 0 &dev->mutex acpi_link_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->mutex acpi_link_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex acpi_link_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &drv->dynids.lock irq_context: 0 &dev->mutex pci_lock irq_context: softirq rcu_callback put_task_map-wait-type-override &base->lock irq_context: softirq rcu_callback put_task_map-wait-type-override &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex virtio_index_ida.xa_lock irq_context: 0 &dev->mutex &dev->mutex &dev->power.lock irq_context: 0 &dev->mutex &dev->mutex &k->list_lock irq_context: 0 &dev->mutex &dev->mutex &k->k_lock irq_context: 0 &dev->mutex 
subsys mutex#33 irq_context: 0 vdpa_dev_lock irq_context: 0 &type->i_mutex_dir_key#2 irq_context: 0 &type->i_mutex_dir_key#2 fs_reclaim irq_context: 0 &type->i_mutex_dir_key#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#2 pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#2 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#2 rcu_read_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#2 &dentry->d_lock &wq irq_context: 0 &x->wait#11 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#11 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &x->wait#11 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &rq->__lock irq_context: 0 subsys mutex#34 irq_context: 0 subsys mutex#34 &k->k_lock irq_context: 0 &root->kernfs_rwsem &rq->__lock irq_context: 0 &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pool_lock irq_context: softirq rcu_callback &meta->lock irq_context: softirq rcu_callback kfence_freelist_lock irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dev->power.lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dev->power.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 quarantine_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override &c->lock irq_context: softirq lib/debugobjects.c:101 irq_context: softirq lib/debugobjects.c:101 rcu_read_lock &pool->lock irq_context: softirq lib/debugobjects.c:101 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq lib/debugobjects.c:101 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq 
lib/debugobjects.c:101 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq lib/debugobjects.c:101 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (debug_obj_work).work irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &rq->__lock irq_context: 0 port_mutex irq_context: 0 port_mutex fs_reclaim irq_context: 0 port_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 port_mutex pool_lock#2 irq_context: 0 port_mutex &x->wait#9 irq_context: 0 port_mutex &obj_hash[i].lock irq_context: 0 port_mutex &____s->seqcount irq_context: 0 port_mutex &k->list_lock irq_context: 0 port_mutex lock irq_context: 0 port_mutex lock kernfs_idr_lock irq_context: 0 port_mutex &root->kernfs_rwsem irq_context: 0 port_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 port_mutex bus_type_sem irq_context: 0 port_mutex sysfs_symlink_target_lock irq_context: 0 port_mutex &k->k_lock irq_context: 0 port_mutex &root->kernfs_rwsem irq_context: 0 port_mutex &c->lock irq_context: 0 port_mutex &dev->power.lock irq_context: 0 port_mutex dpm_list_mtx irq_context: 0 port_mutex uevent_sock_mutex irq_context: 0 port_mutex rcu_read_lock &pool->lock irq_context: 0 port_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 port_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 port_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 port_mutex running_helpers_waitq.lock irq_context: 0 port_mutex &dev->mutex &dev->power.lock irq_context: 0 port_mutex &dev->mutex &k->list_lock irq_context: 0 port_mutex &dev->mutex &k->k_lock irq_context: 0 port_mutex &dev->mutex device_links_srcu irq_context: 0 port_mutex &dev->mutex fwnode_link_lock irq_context: 0 port_mutex &dev->mutex device_links_lock irq_context: 0 port_mutex &dev->mutex fs_reclaim irq_context: 0 port_mutex &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 port_mutex &dev->mutex pool_lock#2 irq_context: 0 port_mutex &dev->mutex &dev->devres_lock irq_context: 0 port_mutex &dev->mutex pinctrl_list_mutex irq_context: 0 port_mutex &dev->mutex pinctrl_maps_mutex irq_context: 0 port_mutex &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 port_mutex &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 port_mutex &dev->mutex &obj_hash[i].lock irq_context: 0 port_mutex &dev->mutex sysfs_symlink_target_lock irq_context: 0 port_mutex &dev->mutex lock irq_context: 0 port_mutex &dev->mutex lock kernfs_idr_lock irq_context: 0 port_mutex &dev->mutex &root->kernfs_rwsem irq_context: 0 port_mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 port_mutex &dev->mutex deferred_probe_mutex irq_context: 0 port_mutex &dev->mutex uevent_sock_mutex irq_context: 0 port_mutex &dev->mutex rcu_read_lock &pool->lock irq_context: 0 port_mutex &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 port_mutex &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 port_mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 port_mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 port_mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 port_mutex &dev->mutex running_helpers_waitq.lock irq_context: 0 port_mutex &dev->mutex probe_waitqueue.lock irq_context: 0 port_mutex subsys mutex#14 irq_context: 0 port_mutex &xa->xa_lock#3 irq_context: 
0 port_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 port_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 port_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 port_mutex &rq->__lock irq_context: 0 port_mutex &cfs_rq->removed.lock irq_context: 0 port_mutex &obj_hash[i].lock pool_lock irq_context: 0 port_mutex &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 port_mutex &dev->mutex &c->lock irq_context: 0 port_mutex &dev->mutex &____s->seqcount irq_context: 0 port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock irq_context: 0 port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 port_mutex &dev->mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 port_mutex &port->mutex irq_context: 0 port_mutex &port->mutex fs_reclaim irq_context: 0 port_mutex &port->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 port_mutex &port->mutex pool_lock#2 irq_context: 0 port_mutex &port->mutex console_mutex irq_context: 0 port_mutex &port->mutex ctrl_ida.xa_lock irq_context: 0 port_mutex &port->mutex &x->wait#9 irq_context: 0 port_mutex &port->mutex &obj_hash[i].lock irq_context: 0 port_mutex &port->mutex &dev->power.lock irq_context: 0 port_mutex &port->mutex &k->list_lock irq_context: 0 port_mutex &port->mutex lock irq_context: 0 port_mutex &port->mutex lock kernfs_idr_lock irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 port_mutex &port->mutex bus_type_sem irq_context: 0 port_mutex &port->mutex &____s->seqcount irq_context: 0 port_mutex &port->mutex sysfs_symlink_target_lock irq_context: 0 port_mutex &port->mutex &k->k_lock irq_context: 0 port_mutex &port->mutex dpm_list_mtx irq_context: 0 port_mutex &port->mutex uevent_sock_mutex irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 port_mutex &port->mutex running_helpers_waitq.lock irq_context: 0 port_mutex &port->mutex &dev->mutex &dev->power.lock irq_context: 0 port_mutex &port->mutex &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 port_mutex &port->mutex &dev->mutex &k->list_lock irq_context: 0 port_mutex &port->mutex &dev->mutex &k->k_lock irq_context: 0 port_mutex &port->mutex &dev->mutex &dev->power.lock hrtimer_bases.lock irq_context: 0 port_mutex &port->mutex &dev->mutex &dev->power.lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 port_mutex 
&port->mutex subsys mutex#15 irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem irq_context: 0 port_mutex &port->mutex dev_pm_qos_sysfs_mtx irq_context: 0 port_mutex &port->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 port_mutex &port->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 port_mutex &port->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 port_mutex &port->mutex kernfs_idr_lock irq_context: 0 port_mutex &port->mutex &k->k_lock klist_remove_lock irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 port_mutex &port->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 port_mutex &port->mutex deferred_probe_mutex irq_context: 0 port_mutex &port->mutex device_links_lock irq_context: 0 port_mutex &port->mutex mmu_notifier_invalidate_range_start irq_context: 0 port_mutex &port->mutex gdp_mutex irq_context: 0 port_mutex &port->mutex gdp_mutex &k->list_lock irq_context: 0 port_mutex &port->mutex gdp_mutex fs_reclaim irq_context: 0 port_mutex &port->mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 port_mutex &port->mutex gdp_mutex pool_lock#2 irq_context: 0 port_mutex &port->mutex gdp_mutex lock irq_context: 0 port_mutex &port->mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 port_mutex &port->mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 port_mutex &port->mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 port_mutex &port->mutex &c->lock irq_context: 0 port_mutex &port->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 port_mutex &port->mutex req_lock irq_context: 0 port_mutex &port->mutex &p->pi_lock irq_context: 0 port_mutex &port->mutex &p->pi_lock &rq->__lock irq_context: 0 port_mutex &port->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 port_mutex &port->mutex &x->wait#11 irq_context: 0 port_mutex &port->mutex &rq->__lock irq_context: 0 port_mutex &port->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 port_mutex &port->mutex subsys mutex#16 irq_context: 0 port_mutex &port->mutex subsys mutex#16 &k->k_lock irq_context: 0 port_mutex &port->mutex chrdevs_lock irq_context: 0 port_mutex &port->mutex &pcp->lock &zone->lock irq_context: 0 port_mutex &port->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 port_mutex &port->mutex &cfs_rq->removed.lock irq_context: 0 port_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 port_mutex &port->mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &n->list_lock irq_context: 0 port_mutex &port->mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 port_mutex &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 port_mutex &port->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (work_completion)(&buf->work) irq_context: 0 &dev->mutex rng_index_ida.xa_lock irq_context: 0 &dev->mutex &md->mutex irq_context: 0 &dev->mutex free_vmap_area_lock irq_context: 0 &dev->mutex free_vmap_area_lock &obj_hash[i].lock irq_context: 0 &dev->mutex free_vmap_area_lock pool_lock#2 irq_context: 0 &dev->mutex vmap_area_lock irq_context: 0 &dev->mutex &md->mutex pci_lock irq_context: 0 &dev->mutex &md->mutex fs_reclaim irq_context: 0 &dev->mutex &md->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &md->mutex pool_lock#2 
irq_context: 0 &dev->mutex &md->mutex &xa->xa_lock#6 irq_context: 0 &dev->mutex &md->mutex &xa->xa_lock#6 &c->lock irq_context: 0 &dev->mutex &md->mutex &xa->xa_lock#6 &____s->seqcount irq_context: 0 &dev->mutex &md->mutex &xa->xa_lock#6 pool_lock#2 irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock &its->lock irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock fs_reclaim irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock &zone->lock irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock &rq->__lock irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock &obj_hash[i].lock irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock pool_lock#2 irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock &____s->seqcount irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock lpi_range_lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock fs_reclaim irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock pool_lock#2 irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &obj_hash[i].lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock lock kernfs_idr_lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &md->mutex &domain->mutex fs_reclaim irq_context: 0 &dev->mutex &md->mutex &domain->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &md->mutex &domain->mutex pool_lock#2 irq_context: 0 &dev->mutex &md->mutex &domain->mutex &irq_desc_lock_class irq_context: 0 &dev->mutex &md->mutex &irq_desc_lock_class irq_context: 0 &dev->mutex &md->mutex tmpmask_lock irq_context: 0 &dev->mutex &md->mutex &its->lock irq_context: 0 &dev->mutex &md->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex &md->mutex lock irq_context: 0 &dev->mutex &md->mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex &md->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex &md->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &c->lock irq_context: 0 &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &____s->seqcount irq_context: 0 &dev->mutex &desc->request_mutex &irq_desc_lock_class &its->lock irq_context: 0 &dev->mutex &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock tmpmask_lock irq_context: 0 &dev->mutex &zone->lock irq_context: 0 &dev->mutex &zone->lock &____s->seqcount irq_context: 0 &dev->mutex 
&dev->vqs_list_lock irq_context: 0 &dev->mutex &vp_dev->lock irq_context: 0 &dev->mutex register_lock &c->lock irq_context: 0 &dev->mutex register_lock &____s->seqcount irq_context: 0 &dev->mutex rng_mutex irq_context: 0 &dev->mutex rng_mutex &x->wait#13 irq_context: 0 &dev->mutex rng_mutex fs_reclaim irq_context: 0 &dev->mutex rng_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex rng_mutex pool_lock#2 irq_context: 0 &dev->mutex rng_mutex kthread_create_lock irq_context: 0 &dev->mutex rng_mutex &p->pi_lock irq_context: 0 &dev->mutex rng_mutex &x->wait irq_context: 0 &dev->mutex rng_mutex &rq->__lock irq_context: 0 &dev->mutex rng_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex rng_mutex &rq->__lock &cfs_rq->removed.lock irq_context: hardirq &x->wait#14 irq_context: 0 &dev->mutex rng_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex rng_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex reading_mutex irq_context: 0 rng_mutex irq_context: 0 &dev->mutex input_pool.lock irq_context: 0 reading_mutex irq_context: 0 &dev->mutex &dev->config_lock irq_context: 0 reading_mutex &x->wait#14 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 misc_mtx &cfs_rq->removed.lock irq_context: softirq drivers/char/random.c:1010 irq_context: softirq drivers/char/random.c:1010 input_pool.lock irq_context: softirq rcu_callback &base->lock irq_context: softirq rcu_callback &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 fs_reclaim &rq->__lock irq_context: 0 fs_reclaim &cfs_rq->removed.lock irq_context: 0 fs_reclaim &obj_hash[i].lock irq_context: 0 fs_reclaim pool_lock#2 irq_context: 0 &dev->devres_lock irq_context: 0 &dev->managed.lock irq_context: 0 &type->s_umount_key#23/1 irq_context: 0 &type->s_umount_key#23/1 fs_reclaim irq_context: 0 &type->s_umount_key#23/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#23/1 pool_lock#2 irq_context: 0 &type->s_umount_key#23/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#23/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#23/1 shrinker_mutex irq_context: 0 &type->s_umount_key#23/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#23/1 sb_lock irq_context: 0 &type->s_umount_key#23/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#23/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#23/1 &sb->s_type->i_lock_key#21 irq_context: 0 &type->s_umount_key#23/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#23/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#23/1 &sb->s_type->i_lock_key#21 &dentry->d_lock irq_context: 0 &type->s_umount_key#23/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_lock_key#21 irq_context: 0 lock drm_minor_lock irq_context: 0 lock drm_minor_lock pool_lock#2 irq_context: 0 stack_depot_init_mutex irq_context: 0 subsys mutex#35 irq_context: 0 subsys mutex#35 &k->k_lock irq_context: 0 drm_minor_lock irq_context: 0 &dev->mode_config.idr_mutex irq_context: 0 &dev->mode_config.idr_mutex fs_reclaim irq_context: 0 &dev->mode_config.idr_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mode_config.idr_mutex pool_lock#2 irq_context: 0 crtc_ww_class_acquire irq_context: 0 
crtc_ww_class_acquire crtc_ww_class_mutex irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_acquire irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_acquire reservation_ww_class_mutex irq_context: 0 &dev->mode_config.blob_lock irq_context: 0 &xa->xa_lock#7 irq_context: 0 &xa->xa_lock#8 irq_context: 0 &dev->mode_config.connector_list_lock irq_context: 0 &dev->vbl_lock irq_context: 0 drm_connector_list_iter &dev->mode_config.connector_list_lock irq_context: 0 drm_connector_list_iter fs_reclaim irq_context: 0 drm_connector_list_iter fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 drm_connector_list_iter pool_lock#2 irq_context: 0 drm_connector_list_iter &connector->mutex irq_context: 0 drm_connector_list_iter &connector->mutex fs_reclaim irq_context: 0 drm_connector_list_iter &connector->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 drm_connector_list_iter &connector->mutex pool_lock#2 irq_context: 0 drm_connector_list_iter &connector->mutex &x->wait#9 irq_context: 0 drm_connector_list_iter &connector->mutex &obj_hash[i].lock irq_context: 0 drm_connector_list_iter &connector->mutex &k->list_lock irq_context: 0 drm_connector_list_iter &connector->mutex lock irq_context: 0 drm_connector_list_iter &connector->mutex lock kernfs_idr_lock irq_context: 0 drm_connector_list_iter &connector->mutex &root->kernfs_rwsem irq_context: 0 drm_connector_list_iter &connector->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 drm_connector_list_iter &connector->mutex bus_type_sem irq_context: 0 drm_connector_list_iter &connector->mutex sysfs_symlink_target_lock irq_context: 0 drm_connector_list_iter &connector->mutex &c->lock irq_context: 0 drm_connector_list_iter &connector->mutex &____s->seqcount irq_context: 0 drm_connector_list_iter &connector->mutex &root->kernfs_rwsem irq_context: 0 drm_connector_list_iter &connector->mutex &dev->power.lock irq_context: 0 drm_connector_list_iter &connector->mutex dpm_list_mtx irq_context: 0 drm_connector_list_iter &connector->mutex uevent_sock_mutex irq_context: 0 drm_connector_list_iter &connector->mutex rcu_read_lock &pool->lock irq_context: 0 drm_connector_list_iter &connector->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 drm_connector_list_iter &connector->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 drm_connector_list_iter &connector->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 drm_connector_list_iter &connector->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 drm_connector_list_iter &connector->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 drm_connector_list_iter &connector->mutex running_helpers_waitq.lock irq_context: 0 drm_connector_list_iter &connector->mutex &k->k_lock irq_context: 0 drm_connector_list_iter &connector->mutex subsys mutex#35 irq_context: 0 drm_connector_list_iter &connector->mutex subsys mutex#35 &k->k_lock irq_context: 0 drm_connector_list_iter &connector->mutex pin_fs_lock irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 
drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 drm_connector_list_iter &connector->mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 drm_connector_list_iter &connector->mutex &dev->mode_config.idr_mutex irq_context: 0 drm_connector_list_iter &connector->mutex connector_list_lock irq_context: 0 drm_connector_list_iter &connector->mutex &pcp->lock &zone->lock irq_context: 0 drm_connector_list_iter &connector->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->filelist_mutex irq_context: 0 &dev->clientlist_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock irq_context: 0 &dev->clientlist_mutex &helper->lock drm_connector_list_iter &dev->mode_config.connector_list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock drm_connector_list_iter fs_reclaim irq_context: 0 &dev->clientlist_mutex &helper->lock drm_connector_list_iter fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock drm_connector_list_iter pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock fs_reclaim irq_context: 0 &dev->clientlist_mutex &helper->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex crtc_ww_class_acquire irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &c->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &____s->seqcount irq_context: 0 &dev->clientlist_mutex 
&helper->lock &client->modeset_mutex &dev->mode_config.mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex &dev->mode_config.mutex &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &client->modeset_mutex drm_connector_list_iter &dev->mode_config.connector_list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock &sb->s_type->i_lock_key irq_context: 0 &dev->clientlist_mutex &helper->lock &s->s_inode_list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock batched_entropy_u32.lock irq_context: 0 &dev->clientlist_mutex &helper->lock &____s->seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock &sb->s_type->i_lock_key &dentry->d_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &mgr->vm_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &mgr->vm_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->object_name_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->object_name_lock lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->object_name_lock lock &file_private->table_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->object_name_lock lock &file_private->table_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &node->vm_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &file_private->table_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->mode_config.idr_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->mode_config.fb_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &file->fbs_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &prime_fpriv->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &node->vm_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &node->vm_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &file_private->table_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &file_private->table_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock free_vmap_area_lock irq_context: 0 &dev->clientlist_mutex &helper->lock vmap_area_lock irq_context: 0 &dev->clientlist_mutex &helper->lock init_mm.page_table_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &c->lock irq_context: 0 &dev->clientlist_mutex &helper->lock free_vmap_area_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock free_vmap_area_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock irq_context: 0 &dev->clientlist_mutex registration_lock fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock 
pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock &x->wait#9 irq_context: 0 &dev->clientlist_mutex registration_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock &k->list_lock irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex &k->list_lock irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex lock irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock lock irq_context: 0 &dev->clientlist_mutex registration_lock lock kernfs_idr_lock irq_context: 0 &dev->clientlist_mutex registration_lock &root->kernfs_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock bus_type_sem irq_context: 0 &dev->clientlist_mutex registration_lock sysfs_symlink_target_lock irq_context: 0 &dev->clientlist_mutex registration_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock &root->kernfs_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock &c->lock irq_context: 0 &dev->clientlist_mutex registration_lock &____s->seqcount irq_context: 0 &dev->clientlist_mutex registration_lock &dev->power.lock irq_context: 0 &dev->clientlist_mutex registration_lock dpm_list_mtx irq_context: 0 &dev->clientlist_mutex registration_lock req_lock irq_context: 0 &dev->clientlist_mutex registration_lock &p->pi_lock irq_context: 0 &dev->clientlist_mutex registration_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->clientlist_mutex registration_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex registration_lock &rq->__lock irq_context: 0 &dev->clientlist_mutex registration_lock &x->wait#11 irq_context: 0 &dev->clientlist_mutex registration_lock uevent_sock_mutex irq_context: 0 &dev->clientlist_mutex registration_lock rcu_read_lock &pool->lock irq_context: 0 &dev->clientlist_mutex registration_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->clientlist_mutex registration_lock running_helpers_waitq.lock irq_context: 0 &dev->clientlist_mutex registration_lock &k->k_lock irq_context: 0 &dev->clientlist_mutex registration_lock subsys mutex#11 irq_context: 0 &dev->clientlist_mutex registration_lock subsys mutex#11 &k->k_lock irq_context: 0 &dev->clientlist_mutex registration_lock vt_switch_mutex irq_context: 0 &dev->clientlist_mutex registration_lock vt_switch_mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock vt_switch_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock vt_switch_mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock (console_sem).lock 
irq_context: 0 &dev->clientlist_mutex registration_lock console_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &fb_info->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock console_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock console_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &____s->seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock vt_event_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &base->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &base->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &x->wait#9 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &k->list_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock gdp_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock gdp_mutex &k->list_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock lock kernfs_idr_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &root->kernfs_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock console_lock bus_type_sem irq_context: 0 &dev->clientlist_mutex registration_lock console_lock sysfs_symlink_target_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &c->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &root->kernfs_rwsem irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &dev->power.lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock dpm_list_mtx irq_context: 0 &dev->clientlist_mutex registration_lock console_lock uevent_sock_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock rcu_read_lock &pool->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock running_helpers_waitq.lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock subsys mutex#6 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock subsys mutex#6 &k->k_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock console_lock 
&helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &c->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &____s->seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->mode_config.idr_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->mode_config.blob_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &dev->mode_config.connector_list_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &crtc->commit_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &c->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &____s->seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex 
crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &xa->xa_lock#9 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &sb->s_type->i_lock_key irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &info->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex lock#4 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &xa->xa_lock#9 pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex lock#4 &lruvec->lru_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &xa->xa_lock#9 &c->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &xa->xa_lock#9 &____s->seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &pcp->lock &zone->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &rq->__lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex free_vmap_area_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex vmap_area_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex 
init_mm.page_table_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &____s->seqcount#5 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &x->wait#15 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->vbl_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->vbl_lock &dev->vblank_time_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->vbl_lock &dev->vblank_time_lock tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->vbl_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &dev->vbl_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock &____s->seqcount#5 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &x->wait#15 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex (work_completion)(&vkms_state->composer_work) irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->damage_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
&dev->clientlist_mutex registration_lock console_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->damage_lock irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->lock irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->lock reservation_ww_class_mutex irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter fs_reclaim irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vblank_time_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &base->lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &base->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex 
&client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &vkms_out->lock irq_context: hardirq &vkms_out->lock &dev->event_lock irq_context: hardirq &vkms_out->lock &dev->event_lock &dev->vblank_time_lock irq_context: hardirq &vkms_out->lock &dev->event_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock irq_context: hardirq &vkms_out->lock &dev->event_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock &____s->seqcount#5 irq_context: hardirq &vkms_out->lock &dev->event_lock &vblank->queue irq_context: hardirq &vkms_out->lock &dev->event_lock &____s->seqcount#5 irq_context: hardirq &vkms_out->lock &dev->event_lock &obj_hash[i].lock irq_context: hardirq &vkms_out->lock &dev->event_lock &base->lock irq_context: hardirq &vkms_out->lock &dev->event_lock &base->lock &obj_hash[i].lock irq_context: hardirq &vkms_out->lock &dev->event_lock &x->wait#15 irq_context: hardirq &vkms_out->lock &dev->event_lock &x->wait#15 &p->pi_lock irq_context: hardirq &vkms_out->lock &dev->event_lock &x->wait#15 &p->pi_lock &rq->__lock irq_context: hardirq &vkms_out->lock &dev->event_lock &x->wait#15 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &vkms_out->lock &dev->event_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex (&timer.timer) irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex (work_completion)(&vkms_state->composer_work)#2 irq_context: 0 &dev->clientlist_mutex registration_lock console_lock (console_sem).lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock console_owner_lock irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->lock &lock->wait_lock irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->lock sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&helper->damage_work) &helper->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &lock->wait_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &p->pi_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex registration_lock console_lock console_srcu console_owner_lock irq_context: 0 &dev->clientlist_mutex registration_lock console_lock console_srcu console_owner irq_context: 0 &dev->clientlist_mutex registration_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->clientlist_mutex registration_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->clientlist_mutex (console_sem).lock irq_context: 0 &dev->clientlist_mutex console_lock console_srcu console_owner_lock irq_context: 0 &dev->clientlist_mutex console_lock console_srcu console_owner irq_context: 0 &dev->clientlist_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->clientlist_mutex console_lock console_srcu console_owner 
console_owner_lock irq_context: 0 &dev->clientlist_mutex kernel_fb_helper_lock irq_context: softirq (&timer.timer) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 uevent_sock_mutex &rq->__lock irq_context: 0 uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 uevent_sock_mutex &obj_hash[i].lock irq_context: 0 uevent_sock_mutex pool_lock#2 irq_context: 0 drivers_lock#2 irq_context: 0 devices_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex lock kernfs_idr_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &root->kernfs_rwsem irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 cpu_hotplug_lock cpuhp_state-up &x->wait#9 irq_context: 0 cpu_hotplug_lock cpuhp_state-up &k->list_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up bus_type_sem irq_context: 0 cpu_hotplug_lock cpuhp_state-up &k->k_lock irq_context: 0 cpu_hotplug_lock cpuhp_state-up lock kernfs_idr_lock pool_lock#2 irq_context: 0 cpu_hotplug_lock cpuhp_state-up &pcp->lock irq_context: 0 &x->wait#6 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) batched_entropy_u64.lock crngs.lock base_crng.lock irq_context: 0 blk_queue_ida.xa_lock irq_context: 0 &sb->s_type->i_lock_key#3 irq_context: 0 &xa->xa_lock#10 irq_context: 0 lock &q->queue_lock irq_context: 0 lock &q->queue_lock &blkcg->lock irq_context: 0 &q->queue_lock irq_context: 0 &q->queue_lock pool_lock#2 irq_context: 0 &q->queue_lock pcpu_lock irq_context: 0 &q->queue_lock &obj_hash[i].lock irq_context: 0 &q->queue_lock percpu_counters_lock irq_context: 0 &q->queue_lock &obj_hash[i].lock pool_lock irq_context: 0 &q->queue_lock &blkcg->lock irq_context: 0 &bdev->bd_size_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 subsys mutex#36 irq_context: 0 subsys mutex#36 &k->k_lock irq_context: 0 dev_hotplug_mutex irq_context: 0 dev_hotplug_mutex &dev->power.lock irq_context: 0 &q->sysfs_dir_lock irq_context: 0 &q->sysfs_dir_lock fs_reclaim irq_context: 0 &q->sysfs_dir_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_dir_lock pool_lock#2 irq_context: 0 &q->sysfs_dir_lock lock irq_context: 0 &q->sysfs_dir_lock lock kernfs_idr_lock irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &q->sysfs_dir_lock &c->lock irq_context: 0 &q->sysfs_dir_lock &____s->seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex pin_fs_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start 
irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 percpu_ref_switch_lock irq_context: 0 subsys mutex#37 irq_context: 0 subsys mutex#37 &k->k_lock irq_context: 0 cgwb_lock irq_context: 0 bdi_lock irq_context: 0 inode_hash_lock irq_context: 0 inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 bdev_lock irq_context: 0 &disk->open_mutex irq_context: 0 &disk->open_mutex fs_reclaim irq_context: 0 &disk->open_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &disk->open_mutex pool_lock#2 irq_context: 0 &disk->open_mutex free_vmap_area_lock irq_context: 0 &disk->open_mutex vmap_area_lock irq_context: 0 &disk->open_mutex &____s->seqcount irq_context: 0 &disk->open_mutex init_mm.page_table_lock irq_context: 0 &disk->open_mutex &xa->xa_lock#9 irq_context: 0 &disk->open_mutex lock#4 irq_context: 0 &disk->open_mutex mmu_notifier_invalidate_range_start irq_context: 0 &disk->open_mutex &c->lock irq_context: 0 &disk->open_mutex &mapping->i_private_lock irq_context: 0 &disk->open_mutex tk_core.seq.seqcount irq_context: 0 &disk->open_mutex &ret->b_uptodate_lock irq_context: 0 &disk->open_mutex &obj_hash[i].lock irq_context: 0 &disk->open_mutex &xa->xa_lock#9 pool_lock#2 irq_context: 0 &disk->open_mutex purge_vmap_area_lock irq_context: 0 &disk->open_mutex &sb->s_type->i_lock_key#3 irq_context: 0 &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 irq_context: 0 &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &disk->open_mutex lock#4 &lruvec->lru_lock irq_context: 0 &disk->open_mutex lock#5 irq_context: 0 &disk->open_mutex &lruvec->lru_lock irq_context: 0 lock &q->queue_lock &blkcg->lock pool_lock#2 irq_context: 0 &q->queue_lock &c->lock irq_context: 0 &q->queue_lock &____s->seqcount irq_context: 0 &q->sysfs_dir_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 &disk->open_mutex free_vmap_area_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex free_vmap_area_lock pool_lock#2 irq_context: 0 &disk->open_mutex purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex purge_vmap_area_lock pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 &disk->open_mutex &xa->xa_lock#9 &c->lock irq_context: 0 &disk->open_mutex &xa->xa_lock#9 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 
&sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &q->queue_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &q->queue_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &q->queue_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 loop_ctl_mutex irq_context: 0 loop_ctl_mutex fs_reclaim irq_context: 0 loop_ctl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 loop_ctl_mutex pool_lock#2 irq_context: 0 &q->sysfs_lock irq_context: 0 &q->sysfs_lock &q->unused_hctx_lock irq_context: 0 &q->sysfs_lock mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_lock pool_lock#2 irq_context: 0 &q->sysfs_lock &obj_hash[i].lock irq_context: 0 &q->sysfs_lock cpu_hotplug_lock irq_context: 0 &q->sysfs_lock cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 &q->sysfs_lock fs_reclaim irq_context: 0 &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_lock &xa->xa_lock#11 irq_context: 0 &set->tag_list_lock irq_context: 0 &q->mq_freeze_lock irq_context: 0 &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 &q->mq_freeze_lock percpu_ref_switch_lock rcu_read_lock &q->mq_freeze_wq irq_context: 0 &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock fs_reclaim irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &obj_hash[i].lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock percpu_ref_switch_lock rcu_read_lock &q->mq_freeze_wq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex percpu_ref_switch_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex pin_fs_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock 
&q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &stats->lock irq_context: 0 &q->sysfs_lock &obj_hash[i].lock pool_lock irq_context: 0 &q->sysfs_lock &rq->__lock irq_context: 0 &q->sysfs_lock &cfs_rq->removed.lock irq_context: 0 &q->sysfs_lock &c->lock irq_context: 0 &q->sysfs_lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&p->wq) purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) purge_vmap_area_lock pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pcpu_alloc_mutex &rq->__lock irq_context: 0 pcpu_alloc_mutex &cfs_rq->removed.lock irq_context: 0 pcpu_alloc_mutex &obj_hash[i].lock irq_context: 0 pcpu_alloc_mutex pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &c->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &____s->seqcount irq_context: 0 &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &obj_hash[i].lock pool_lock irq_context: 0 &q->queue_lock &pcp->lock &zone->lock irq_context: 0 &q->queue_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 nbd_index_mutex irq_context: 0 nbd_index_mutex fs_reclaim irq_context: 0 nbd_index_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nbd_index_mutex pool_lock#2 irq_context: 0 set->srcu irq_context: 0 (work_completion)(&(&q->requeue_work)->work) irq_context: 0 (work_completion)(&(&hctx->run_work)->work) irq_context: 0 &q->debugfs_mutex irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock lock kernfs_idr_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &root->kernfs_rwsem irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex set->srcu irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 batched_entropy_u8.lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 batched_entropy_u8.lock crngs.lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 kfence_freelist_lock 
irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock rcu_node_0 irq_context: 0 &q->sysfs_dir_lock lock kernfs_idr_lock &c->lock irq_context: 0 &q->sysfs_dir_lock lock kernfs_idr_lock &____s->seqcount irq_context: 0 &q->sysfs_lock &pcp->lock &zone->lock irq_context: 0 &q->sysfs_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq &(&ops->cursor_work)->timer irq_context: softirq &(&ops->cursor_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&ops->cursor_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&ops->cursor_work)->timer rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq &(&ops->cursor_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) (console_sem).lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock &helper->damage_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) console_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&ops->cursor_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 zram_index_mutex irq_context: 0 zram_index_mutex fs_reclaim irq_context: 0 zram_index_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex pool_lock#2 irq_context: 0 zram_index_mutex blk_queue_ida.xa_lock irq_context: 0 zram_index_mutex &obj_hash[i].lock irq_context: 0 zram_index_mutex pcpu_alloc_mutex irq_context: 0 zram_index_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 zram_index_mutex bio_slab_lock irq_context: 0 zram_index_mutex &c->lock irq_context: 0 zram_index_mutex &____s->seqcount irq_context: 0 zram_index_mutex percpu_counters_lock irq_context: 0 zram_index_mutex mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex &sb->s_type->i_lock_key#3 irq_context: 0 zram_index_mutex &s->s_inode_list_lock irq_context: 0 zram_index_mutex &xa->xa_lock#10 irq_context: 0 zram_index_mutex lock irq_context: 0 zram_index_mutex lock &q->queue_lock irq_context: 0 zram_index_mutex lock &q->queue_lock &blkcg->lock irq_context: 0 zram_index_mutex &obj_hash[i].lock pool_lock 
irq_context: 0 zram_index_mutex &q->queue_lock irq_context: 0 zram_index_mutex &q->queue_lock pool_lock#2 irq_context: 0 zram_index_mutex &q->queue_lock pcpu_lock irq_context: 0 zram_index_mutex &q->queue_lock &obj_hash[i].lock irq_context: 0 zram_index_mutex &q->queue_lock percpu_counters_lock irq_context: 0 zram_index_mutex &q->queue_lock &blkcg->lock irq_context: 0 zram_index_mutex &x->wait#9 irq_context: 0 zram_index_mutex &bdev->bd_size_lock irq_context: 0 zram_index_mutex &k->list_lock irq_context: 0 zram_index_mutex gdp_mutex irq_context: 0 zram_index_mutex gdp_mutex &k->list_lock irq_context: 0 zram_index_mutex lock kernfs_idr_lock irq_context: 0 zram_index_mutex &root->kernfs_rwsem irq_context: 0 zram_index_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 zram_index_mutex bus_type_sem irq_context: 0 zram_index_mutex sysfs_symlink_target_lock irq_context: 0 zram_index_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 zram_index_mutex &root->kernfs_rwsem irq_context: 0 zram_index_mutex &dev->power.lock irq_context: 0 zram_index_mutex dpm_list_mtx irq_context: 0 zram_index_mutex req_lock irq_context: 0 zram_index_mutex &p->pi_lock irq_context: 0 zram_index_mutex &x->wait#11 irq_context: 0 zram_index_mutex &rq->__lock irq_context: 0 zram_index_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 zram_index_mutex subsys mutex#36 irq_context: 0 zram_index_mutex subsys mutex#36 &k->k_lock irq_context: 0 zram_index_mutex dev_hotplug_mutex irq_context: 0 zram_index_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock fs_reclaim irq_context: 0 zram_index_mutex &q->sysfs_dir_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex &q->sysfs_dir_lock pool_lock#2 irq_context: 0 zram_index_mutex &q->sysfs_dir_lock lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock lock kernfs_idr_lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &root->kernfs_rwsem irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &c->lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &____s->seqcount irq_context: 0 zram_index_mutex &q->sysfs_dir_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex pin_fs_lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 zram_index_mutex 
&q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 zram_index_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 zram_index_mutex percpu_ref_switch_lock irq_context: 0 zram_index_mutex uevent_sock_mutex irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 zram_index_mutex running_helpers_waitq.lock irq_context: 0 zram_index_mutex subsys mutex#37 irq_context: 0 zram_index_mutex subsys mutex#37 &k->k_lock irq_context: 0 zram_index_mutex cgwb_lock irq_context: 0 zram_index_mutex pin_fs_lock irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 zram_index_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 zram_index_mutex bdi_lock irq_context: 0 zram_index_mutex inode_hash_lock irq_context: 0 zram_index_mutex inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 zram_index_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 zram_index_mutex (console_sem).lock irq_context: 0 zram_index_mutex console_lock console_srcu 
console_owner_lock irq_context: 0 zram_index_mutex console_lock console_srcu console_owner irq_context: 0 zram_index_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 zram_index_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 subsys mutex#38 irq_context: 0 subsys mutex#38 &k->k_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]#3 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]#3 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]#3 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]#3 configfs_dirent_lock irq_context: 0 &q->sysfs_lock &xa->xa_lock#11 pool_lock#2 irq_context: 0 &q->queue_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &lock irq_context: 0 &lock nullb_indexes.xa_lock irq_context: 0 &q->sysfs_dir_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex rcu_read_lock tk_core.seq.seqcount irq_context: 0 &disk->open_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex rcu_read_lock &base->lock irq_context: 0 &disk->open_mutex rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex rcu_read_lock &ret->b_uptodate_lock irq_context: 0 nfc_index_ida.xa_lock irq_context: 0 nfc_devlist_mutex irq_context: 0 nfc_devlist_mutex fs_reclaim irq_context: 0 nfc_devlist_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nfc_devlist_mutex pool_lock#2 irq_context: 0 nfc_devlist_mutex &k->list_lock irq_context: 0 nfc_devlist_mutex gdp_mutex irq_context: 0 nfc_devlist_mutex gdp_mutex &k->list_lock irq_context: 0 nfc_devlist_mutex gdp_mutex fs_reclaim irq_context: 0 nfc_devlist_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nfc_devlist_mutex gdp_mutex pool_lock#2 irq_context: 0 nfc_devlist_mutex gdp_mutex lock irq_context: 0 nfc_devlist_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 nfc_devlist_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 nfc_devlist_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 nfc_devlist_mutex lock irq_context: 0 nfc_devlist_mutex lock kernfs_idr_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 nfc_devlist_mutex bus_type_sem irq_context: 0 nfc_devlist_mutex sysfs_symlink_target_lock irq_context: 0 nfc_devlist_mutex &c->lock irq_context: 0 nfc_devlist_mutex &____s->seqcount irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem irq_context: 0 nfc_devlist_mutex &dev->power.lock irq_context: 0 nfc_devlist_mutex dpm_list_mtx irq_context: 0 nfc_devlist_mutex &pcp->lock &zone->lock irq_context: 0 nfc_devlist_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 nfc_devlist_mutex uevent_sock_mutex irq_context: 0 nfc_devlist_mutex &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex rcu_read_lock &pool->lock irq_context: 0 nfc_devlist_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 nfc_devlist_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 nfc_devlist_mutex running_helpers_waitq.lock irq_context: 0 
nfc_devlist_mutex subsys mutex#39 irq_context: 0 nfc_devlist_mutex subsys mutex#39 &k->k_lock irq_context: 0 llcp_devices_lock irq_context: 0 &dev->mutex rfkill_global_mutex irq_context: 0 &dev->mutex rfkill_global_mutex fs_reclaim irq_context: 0 &dev->mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex rfkill_global_mutex pool_lock#2 irq_context: 0 &dev->mutex rfkill_global_mutex &k->list_lock irq_context: 0 &dev->mutex rfkill_global_mutex lock irq_context: 0 &dev->mutex rfkill_global_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex rfkill_global_mutex bus_type_sem irq_context: 0 &dev->mutex rfkill_global_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex rfkill_global_mutex &c->lock irq_context: 0 &dev->mutex rfkill_global_mutex &____s->seqcount irq_context: 0 &dev->mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex rfkill_global_mutex &dev->power.lock irq_context: 0 &dev->mutex rfkill_global_mutex dpm_list_mtx irq_context: 0 &dev->mutex rfkill_global_mutex &rfkill->lock irq_context: 0 &dev->mutex rfkill_global_mutex uevent_sock_mutex irq_context: 0 &dev->mutex rfkill_global_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex rfkill_global_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex rfkill_global_mutex &k->k_lock irq_context: 0 &dev->mutex rfkill_global_mutex subsys mutex#40 irq_context: 0 &dev->mutex rfkill_global_mutex subsys mutex#40 &k->k_lock irq_context: 0 &dev->mutex rfkill_global_mutex triggers_list_lock irq_context: 0 &dev->mutex rfkill_global_mutex leds_list_lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex rfkill_global_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &rfkill->lock irq_context: 0 &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 dma_heap_minors.xa_lock irq_context: 0 subsys mutex#41 irq_context: 0 subsys mutex#41 &k->k_lock irq_context: 0 heap_list_lock irq_context: 0 dma_heap_minors.xa_lock pool_lock#2 irq_context: 0 cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: 0 subsys mutex#42 irq_context: 0 subsys mutex#42 &k->list_lock irq_context: 0 subsys mutex#42 &k->k_lock irq_context: 0 nvmf_hosts_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex semaphore->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex iommu_probe_device_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 subsys mutex#43 irq_context: 0 subsys mutex#43 &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex *(&acpi_gbl_reference_count_lock) irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &cfs_rq->removed.lock irq_context: 0 nvmf_transports_rwsem irq_context: 0 subsys mutex#44 irq_context: 0 subsys mutex#44 &k->k_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 configfs_dirent_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 
mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &sb->s_type->i_lock_key#18 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &c->lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &____s->seqcount irq_context: 0 nvmet_config_sem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock semaphore->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock *(&acpi_gbl_reference_count_lock) irq_context: 0 subsys mutex#45 irq_context: 0 subsys mutex#45 &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock (console_sem).lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock console_lock console_srcu console_owner irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex acpi_link_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex irq_domain_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &domain->mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &drv->dynids.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cpu_add_remove_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_instance_ida.xa_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex chrdevs_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex 
gdp_mutex &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex gdp_mutex &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex req_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &x->wait#11 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex subsys mutex#46 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex subsys mutex#46 &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx &dev->power.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex dev_pm_qos_mtx pm_qos_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex resource_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex free_vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock pools_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock pool_lock#2 irq_context: 0 
(wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pools_reg_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex (console_sem).lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex console_lock console_srcu console_owner irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pci_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex pci_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &xa->xa_lock#6 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->dev_alloc_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->dev_alloc_lock &its->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->dev_alloc_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->dev_alloc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->dev_alloc_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->dev_alloc_lock lpi_range_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex sparse_irq_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &domain->mutex &irq_desc_lock_class irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &irq_desc_lock_class irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex tmpmask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &its->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &md->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex hrtimer_bases.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 configfs_dirent_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &c->lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &sb->s_type->i_lock_key#18 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 configfs_dirent_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &sb->s_type->i_lock_key#18 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 
&default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 configfs_dirent_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &sb->s_type->i_lock_key#18 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &sb->s_type->i_lock_key#18 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &default_group_class[depth - 1]#7 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &default_group_class[depth - 1]#7 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &default_group_class[depth - 1]#7 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &default_group_class[depth - 1]#7 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &default_group_class[depth - 1]#7 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#6/1 &default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &default_group_class[depth - 1]#7 configfs_dirent_lock irq_context: 0 &sb->s_type->i_mutex_key#6/1 
&default_group_class[depth - 1]/2 &default_group_class[depth - 1]#4/2 &default_group_class[depth - 1]#5/2 &default_group_class[depth - 1]#6/2 &default_group_class[depth - 1]#7/2 irq_context: 0 backend_mutex irq_context: 0 scsi_mib_index_lock irq_context: 0 hba_lock irq_context: 0 device_mutex irq_context: 0 device_mutex fs_reclaim irq_context: 0 device_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 device_mutex pool_lock#2 irq_context: 0 &hba->device_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &desc->request_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &desc->request_mutex &irq_desc_lock_class irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &desc->request_mutex &irq_desc_lock_class &its->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &desc->request_mutex &irq_desc_lock_class mask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock tmpmask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock proc_inum_ida.xa_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex register_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &irq_desc_lock_class irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex proc_inum_ida.xa_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pcpu_alloc_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex batched_entropy_u32.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 
0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex blk_queue_ida.xa_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock &q->unused_hctx_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock cpu_hotplug_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &q->sysfs_lock &xa->xa_lock#11 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &set->tag_list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &ctrl->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &ctrl->lock &ctrl->state_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &hctx->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &hctx->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &nvmeq->sq_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &x->wait#16 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &base->lock &obj_hash[i].lock irq_context: softirq &x->wait#16 irq_context: 0 rcu_read_lock init_fs.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex (&timer.timer) irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &k->list_lock irq_context: 0 
(wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex &k->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock bus_type_sem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &dev->power.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock dpm_list_mtx irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock uevent_sock_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock running_helpers_waitq.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock subsys mutex#47 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex nvme_subsystems_lock subsys mutex#47 &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &xa->xa_lock#12 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &rq->__lock irq_context: 0 part_parser_lock irq_context: 0 mtd_table_mutex irq_context: 0 chip_drvs_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class irq_resend_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class &its->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &ent->pde_unload_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex proc_inum_ida.xa_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex pool_lock#2 irq_context: 
0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex pci_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &irq_desc_lock_class irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock lpi_range_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock lpi_range_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock lpi_range_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex &its->dev_alloc_lock &its->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock &ent->pde_unload_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock proc_inum_ida.xa_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock 
&obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex sparse_irq_lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &xa->xa_lock#6 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex purge_vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock pci_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &rq->__lock irq_context: 0 (kmod_concurrent_max).lock irq_context: 0 &x->wait#17 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock free_vmap_area_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock vmap_area_lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &sig->wait_chldexit irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &xa->xa_lock#6 pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->dev_alloc_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->dev_alloc_lock &its->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->dev_alloc_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->dev_alloc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->dev_alloc_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->dev_alloc_lock lpi_range_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex 
&dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 tasklist_lock &sighand->siglock &sig->wait_chldexit &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &irq_desc_lock_class irq_context: 0 tasklist_lock &sighand->siglock &sig->wait_chldexit &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex tmpmask_lock irq_context: 0 tasklist_lock &sighand->siglock &sig->wait_chldexit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &its->lock irq_context: 0 tasklist_lock &sighand->siglock &sig->wait_chldexit &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &____s->seqcount#4 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &prev->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &(&sig->stats_lock)->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &(&sig->stats_lock)->lock &____s->seqcount#4 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) css_set_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock input_pool.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex &domain->mutex sparse_irq_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 &obj_hash[i].lock irq_context: 0 
(wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &x->wait#17 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &x->wait#17 &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &x->wait#17 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &x->wait#17 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mtd_table_mutex fs_reclaim irq_context: 0 mtd_table_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex pool_lock#2 irq_context: 0 mtd_table_mutex &x->wait#9 irq_context: 0 mtd_table_mutex &obj_hash[i].lock irq_context: 0 mtd_table_mutex &k->list_lock irq_context: 0 mtd_table_mutex gdp_mutex irq_context: 0 mtd_table_mutex gdp_mutex &k->list_lock irq_context: 0 mtd_table_mutex gdp_mutex fs_reclaim irq_context: 0 mtd_table_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex gdp_mutex pool_lock#2 irq_context: 0 mtd_table_mutex gdp_mutex lock irq_context: 0 mtd_table_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 mtd_table_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 mtd_table_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 mtd_table_mutex lock irq_context: 0 mtd_table_mutex lock kernfs_idr_lock irq_context: 0 mtd_table_mutex &root->kernfs_rwsem irq_context: 0 mtd_table_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 mtd_table_mutex bus_type_sem irq_context: 0 mtd_table_mutex sysfs_symlink_target_lock irq_context: 0 mtd_table_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 mtd_table_mutex &c->lock irq_context: 0 mtd_table_mutex &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &md->mutex fs_reclaim &rq->__lock irq_context: 0 mtd_table_mutex &root->kernfs_rwsem irq_context: 0 mtd_table_mutex &dev->power.lock irq_context: 0 mtd_table_mutex dpm_list_mtx irq_context: 0 mtd_table_mutex req_lock irq_context: 0 mtd_table_mutex &p->pi_lock irq_context: 0 mtd_table_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 mtd_table_mutex &p->pi_lock &rq->__lock irq_context: 0 mtd_table_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mtd_table_mutex &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class mask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock tmpmask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock proc_subdir_lock irq_context: 0 mtd_table_mutex &x->wait#11 irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock proc_inum_ida.xa_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock proc_subdir_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock register_lock &____s->seqcount irq_context: 0 mtd_table_mutex batched_entropy_u8.lock irq_context: 0 mtd_table_mutex kfence_freelist_lock irq_context: 0 mtd_table_mutex uevent_sock_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &irq_desc_lock_class irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock proc_subdir_lock irq_context: 0 mtd_table_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock proc_inum_ida.xa_lock irq_context: 0 mtd_table_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock proc_subdir_lock irq_context: 0 mtd_table_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &cma->lock irq_context: 0 mtd_table_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex irq_context: 0 mtd_table_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex &zone->lock irq_context: 0 mtd_table_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mtd_table_mutex running_helpers_waitq.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 mtd_table_mutex subsys mutex#48 irq_context: 0 mtd_table_mutex subsys mutex#48 rcu_node_0 irq_context: 0 mtd_table_mutex subsys mutex#48 &rcu_state.expedited_wq irq_context: 0 mtd_table_mutex subsys mutex#48 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 mtd_table_mutex subsys mutex#48 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 mtd_table_mutex subsys mutex#48 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mtd_table_mutex subsys mutex#48 &rq->__lock irq_context: 0 mtd_table_mutex subsys mutex#48 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mtd_table_mutex subsys mutex#48 &k->k_lock irq_context: 0 mtd_table_mutex devtree_lock irq_context: 0 mtd_table_mutex nvmem_ida.xa_lock irq_context: 0 mtd_table_mutex nvmem_cell_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 mtd_table_mutex &k->k_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex lock#2 irq_context: 0 mtd_table_mutex &dev->mutex &dev->power.lock irq_context: 0 mtd_table_mutex &dev->mutex &k->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex &zone->lock &____s->seqcount irq_context: 0 mtd_table_mutex &dev->mutex &k->k_lock irq_context: 0 mtd_table_mutex subsys mutex#49 irq_context: 0 mtd_table_mutex nvmem_mutex irq_context: 0 mtd_table_mutex pin_fs_lock irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 
tk_core.seq.seqcount irq_context: 0 mtd_table_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 mtd_table_mutex &pcp->lock &zone->lock irq_context: 0 mtd_table_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 mtd_table_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mtd_table_mutex (console_sem).lock irq_context: 0 mtd_table_mutex console_lock console_srcu console_owner_lock irq_context: 0 mtd_table_mutex console_lock console_srcu console_owner irq_context: 0 mtd_table_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex cma_mutex rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mtd_table_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 mtd_table_mutex rcu_node_0 irq_context: 0 mtd_table_mutex &rcu_state.expedited_wq irq_context: 0 mtd_table_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 mtd_table_mutex pcpu_alloc_mutex irq_context: 0 mtd_table_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[0] &p->pi_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[0] &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[0] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mtd_table_mutex batched_entropy_u32.lock irq_context: 0 mtd_table_mutex mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex blk_queue_ida.xa_lock irq_context: 0 mtd_table_mutex &q->sysfs_lock irq_context: 0 mtd_table_mutex &q->sysfs_lock &q->unused_hctx_lock irq_context: 0 mtd_table_mutex &q->sysfs_lock mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_lock pool_lock#2 irq_context: 0 mtd_table_mutex &q->sysfs_lock &obj_hash[i].lock irq_context: 0 mtd_table_mutex &q->sysfs_lock cpu_hotplug_lock irq_context: 0 mtd_table_mutex &q->sysfs_lock cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 mtd_table_mutex &q->sysfs_lock fs_reclaim irq_context: 0 mtd_table_mutex &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_lock &xa->xa_lock#11 irq_context: 0 mtd_table_mutex &obj_hash[i].lock pool_lock irq_context: 0 mtd_table_mutex &set->tag_list_lock irq_context: 0 mtd_table_mutex bio_slab_lock irq_context: 0 mtd_table_mutex percpu_counters_lock irq_context: 0 mtd_table_mutex &sb->s_type->i_lock_key#3 irq_context: 0 mtd_table_mutex &s->s_inode_list_lock irq_context: 0 mtd_table_mutex &xa->xa_lock#10 irq_context: 0 mtd_table_mutex lock &q->queue_lock irq_context: 0 mtd_table_mutex lock &q->queue_lock &blkcg->lock irq_context: 0 mtd_table_mutex &q->mq_freeze_lock irq_context: 0 mtd_table_mutex &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 mtd_table_mutex &q->mq_freeze_lock percpu_ref_switch_lock rcu_read_lock &q->mq_freeze_wq irq_context: 0 mtd_table_mutex set->srcu irq_context: 0 mtd_table_mutex percpu_ref_switch_lock irq_context: softirq &x->wait#16 
&p->pi_lock irq_context: 0 mtd_table_mutex &q->queue_lock irq_context: 0 mtd_table_mutex &q->queue_lock pool_lock#2 irq_context: 0 mtd_table_mutex &q->queue_lock pcpu_lock irq_context: 0 mtd_table_mutex &q->queue_lock &obj_hash[i].lock irq_context: 0 mtd_table_mutex &q->queue_lock percpu_counters_lock irq_context: 0 mtd_table_mutex &q->queue_lock &obj_hash[i].lock pool_lock irq_context: 0 mtd_table_mutex &q->queue_lock &blkcg->lock irq_context: 0 mtd_table_mutex &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 mtd_table_mutex &bdev->bd_size_lock irq_context: 0 mtd_table_mutex elv_list_lock irq_context: 0 mtd_table_mutex (work_completion)(&(&q->requeue_work)->work) irq_context: 0 mtd_table_mutex (work_completion)(&(&hctx->run_work)->work) irq_context: 0 mtd_table_mutex &q->debugfs_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class tmp_mask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &dev->shutdown_lock &desc->request_mutex &irq_desc_lock_class tmp_mask_lock tmpmask_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mtd_table_mutex subsys mutex#36 irq_context: 0 mtd_table_mutex subsys mutex#36 &k->k_lock irq_context: 0 mtd_table_mutex dev_hotplug_mutex irq_context: 0 mtd_table_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock fs_reclaim irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock pool_lock#2 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock lock kernfs_idr_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &root->kernfs_rwsem irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &c->lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &____s->seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex pin_fs_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 
mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &zone->lock &____s->seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock fs_reclaim irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock pool_lock#2 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock lock kernfs_idr_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &root->kernfs_rwsem irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &obj_hash[i].lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock percpu_ref_switch_lock rcu_read_lock &q->mq_freeze_wq irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex set->srcu irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex percpu_ref_switch_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex pin_fs_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex 
&sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &ctrl->namespaces_rwsem irq_context: 0 mtd_table_mutex &q->sysfs_dir_lock &q->sysfs_lock &stats->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nvme-wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->async_event_work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex (work_completion)(&ctrl->scan_work) irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->async_event_work) &nvmeq->sq_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock (wq_completion)nvme-wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex &x->wait#10 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 
(wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &hctx->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &hctx->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &nvmeq->sq_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &x->wait#16 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &base->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mtd_table_mutex subsys mutex#37 irq_context: 0 mtd_table_mutex subsys mutex#37 &k->k_lock irq_context: 0 mtd_table_mutex cgwb_lock irq_context: 0 mtd_table_mutex bdi_lock irq_context: 0 mtd_table_mutex inode_hash_lock irq_context: 0 mtd_table_mutex inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock (&timer.timer) irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &ctrl->namespaces_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock blk_queue_ida.xa_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock pcpu_alloc_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock &q->unused_hctx_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock cpu_hotplug_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock &xa->xa_lock#11 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock &____s->seqcount irq_context: 0 (wq_completion)nvme-wq 
(work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_lock &xa->xa_lock#11 pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &set->tag_list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock bio_slab_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock percpu_counters_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &s->s_inode_list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &xa->xa_lock#10 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock lock &q->queue_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock lock &q->queue_lock &blkcg->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->mq_freeze_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->mq_freeze_lock percpu_ref_switch_lock rcu_read_lock &q->mq_freeze_wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock percpu_ref_switch_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->queue_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->queue_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->queue_lock pcpu_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->queue_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->queue_lock percpu_counters_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->queue_lock &blkcg->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &x->wait#9 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock nvme_subsystems_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock &xa->xa_lock#12 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock &obj_hash[i].lock irq_context: 0 
(wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock pcpu_alloc_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &subsys->lock pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &bdev->bd_size_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &ctrl->namespaces_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock ext_devt_ida.xa_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &k->list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock lock kernfs_idr_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock bus_type_sem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &dev->power.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock dpm_list_mtx irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock req_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &p->pi_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &x->wait#11 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &k->k_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock subsys mutex#36 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock subsys mutex#36 &k->k_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock dev_hotplug_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock dev_hotplug_mutex &dev->power.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock fs_reclaim irq_context: 0 
(wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock lock kernfs_idr_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &root->kernfs_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock lock kernfs_idr_lock &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock lock kernfs_idr_lock &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex pin_fs_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq 
irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock percpu_ref_switch_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock percpu_ref_switch_lock rcu_read_lock &q->mq_freeze_wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex percpu_ref_switch_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock &q->mq_freeze_wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock 
&q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex pin_fs_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &q->sysfs_dir_lock &q->sysfs_lock &stats->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock gdp_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock 
gdp_mutex &k->list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock uevent_sock_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock running_helpers_waitq.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock subsys mutex#37 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock subsys mutex#37 &k->k_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock cgwb_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock pin_fs_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock bdi_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock inode_hash_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock bdev_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) 
&ctrl->scan_lock &disk->open_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex fs_reclaim irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex free_vmap_area_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex vmap_area_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex init_mm.page_table_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &xa->xa_lock#9 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex lock#4 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &mapping->i_private_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex tk_core.seq.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock &base->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock &nvmeq->sq_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &folio_wait_table[i] irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex stack_depot_init_mutex irq_context: 0 rtnl_mutex pcpu_alloc_mutex irq_context: 0 rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start 
irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex kthread_create_lock irq_context: 0 rtnl_mutex &p->pi_lock irq_context: 0 rtnl_mutex &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &ret->b_uptodate_lock irq_context: hardirq &folio_wait_table[i] irq_context: hardirq &folio_wait_table[i] &p->pi_lock irq_context: 0 rtnl_mutex &x->wait irq_context: hardirq &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &xa->xa_lock#9 pool_lock#2 irq_context: hardirq &folio_wait_table[i] &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex wq_pool_mutex irq_context: 0 rtnl_mutex wq_pool_mutex &wq->mutex irq_context: 0 rtnl_mutex crngs.lock irq_context: 0 rtnl_mutex &xa->xa_lock#4 irq_context: 0 rtnl_mutex net_rwsem irq_context: 0 rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 rtnl_mutex &x->wait#9 irq_context: 0 rtnl_mutex &k->list_lock irq_context: 0 rtnl_mutex gdp_mutex irq_context: 0 rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 rtnl_mutex lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex bus_type_sem irq_context: 0 rtnl_mutex sysfs_symlink_target_lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex &root->kernfs_rwsem irq_context: 0 rtnl_mutex &dev->power.lock irq_context: 0 rtnl_mutex dpm_list_mtx irq_context: 0 rtnl_mutex uevent_sock_mutex irq_context: 0 rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex running_helpers_waitq.lock irq_context: 0 rtnl_mutex subsys mutex#20 irq_context: 0 rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 rtnl_mutex &dir->lock#2 irq_context: 0 rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 rtnl_mutex uevent_sock_mutex 
&____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_hotplug_mutex irq_context: 0 rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 rtnl_mutex dev_base_lock irq_context: 0 rtnl_mutex input_pool.lock irq_context: 0 rtnl_mutex batched_entropy_u32.lock irq_context: 0 rtnl_mutex &tbl->lock irq_context: 0 rtnl_mutex sysctl_lock irq_context: 0 rtnl_mutex nl_table_lock irq_context: 0 rtnl_mutex nl_table_wait.lock irq_context: 0 rtnl_mutex rcu_read_lock &bond->stats_lock irq_context: 0 rtnl_mutex lweventlist_lock irq_context: 0 rtnl_mutex lweventlist_lock pool_lock#2 irq_context: 0 rtnl_mutex lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)gid-cache-wq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &obj_hash[i].lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex dev_base_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex proc_subdir_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex proc_subdir_lock irq_context: 0 once_lock irq_context: 0 once_lock crngs.lock irq_context: 0 (wq_completion)events (work_completion)(&w->work) irq_context: 0 (wq_completion)events (work_completion)(&w->work) cpu_hotplug_lock irq_context: 0 (wq_completion)events (work_completion)(&w->work) cpu_hotplug_lock jump_label_mutex irq_context: 0 (wq_completion)events (work_completion)(&w->work) cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 (wq_completion)events (work_completion)(&w->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&w->work) pool_lock#2 irq_context: 0 (inet6addr_validator_chain).rwsem irq_context: 0 (inetaddr_validator_chain).rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex stack_depot_init_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex crngs.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex 
rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex (console_sem).lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex console_owner_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex console_owner irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex console_lock console_srcu console_owner irq_context: 0 console_owner_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 console_owner irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &s->s_inode_list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex pcpu_alloc_mutex irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &bdev->bd_size_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &x->wait#9 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex ext_devt_ida.xa_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &k->list_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex bus_type_sem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)nvme-wq 
(work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &dev->power.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex dpm_list_mtx irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex req_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &p->pi_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &x->wait#11 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &k->k_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex subsys mutex#36 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex subsys mutex#36 &k->k_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &xa->xa_lock#10 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &xa->xa_lock#10 pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex inode_hash_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex purge_vmap_area_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex purge_vmap_area_lock pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &sb->s_type->i_lock_key#3 &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex 
&sb->s_type->i_lock_key#3 &xa->xa_lock#9 pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex lock#4 &lruvec->lru_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex lock#5 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &disk->open_mutex &lruvec->lru_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock nvme_ns_chr_minor_ida.xa_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock chrdevs_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock subsys mutex#50 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock subsys mutex#50 &k->k_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &dentry->d_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &x->wait#16 &p->pi_lock &rq->__lock irq_context: softirq &x->wait#16 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 subsys mutex#51 irq_context: 0 subsys mutex#51 &k->k_lock irq_context: 0 gpio_lookup_lock irq_context: 0 mdio_board_lock irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 mode_list_lock irq_context: 0 misc_mtx &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &x->wait#11 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx fill_pool_map-wait-type-override &c->lock irq_context: 0 misc_mtx fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 misc_mtx fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 l3mdev_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &ctrl->namespaces_rwsem &rq->__lock irq_context: 0 (wq_completion)nvme-wq 
(work_completion)(&ctrl->scan_work) &ctrl->scan_lock quarantine_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&ctrl->scan_work) &ctrl->scan_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&barr->work) irq_context: 0 (wq_completion)nvme-wq (work_completion)(&barr->work) &x->wait#10 irq_context: 0 (wq_completion)nvme-wq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nvme-wq (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&entry->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex batched_entropy_u8.lock irq_context: 0 &dev->mutex batched_entropy_u8.lock crngs.lock irq_context: 0 &dev->mutex kfence_freelist_lock irq_context: 0 &dev->mutex init_mm.page_table_lock irq_context: 0 &dev->mutex stack_depot_init_mutex irq_context: 0 &dev->mutex pcpu_alloc_mutex irq_context: 0 &dev->mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 &dev->mutex cpu_hotplug_lock irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex wq_pool_mutex irq_context: 0 &dev->mutex wq_pool_mutex &wq->mutex irq_context: 0 &dev->mutex pools_reg_lock irq_context: 0 &dev->mutex pools_reg_lock pools_lock irq_context: 0 &dev->mutex pools_reg_lock fs_reclaim irq_context: 0 &dev->mutex pools_reg_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex pools_reg_lock pool_lock#2 irq_context: 0 &dev->mutex pools_reg_lock lock irq_context: 0 &dev->mutex pools_reg_lock lock kernfs_idr_lock irq_context: 0 &dev->mutex pools_reg_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex pools_reg_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &retval->lock irq_context: 0 &dev->mutex &md->mutex &c->lock irq_context: 0 &dev->mutex &md->mutex &____s->seqcount irq_context: 0 &dev->mutex &md->mutex &its->dev_alloc_lock &c->lock irq_context: 0 &dev->mutex &irq_desc_lock_class tmp_mask_lock irq_context: 0 &dev->mutex &irq_desc_lock_class tmp_mask_lock tmpmask_lock irq_context: 0 &dev->mutex &irq_desc_lock_class tmp_mask_lock &its->lock irq_context: 0 &dev->mutex &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock &its->lock irq_context: 0 &dev->mutex rtnl_mutex irq_context: 0 &dev->mutex rtnl_mutex 
pool_lock#2 irq_context: 0 &dev->mutex rtnl_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex rtnl_mutex fs_reclaim irq_context: 0 &dev->mutex rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex rtnl_mutex &xa->xa_lock#4 irq_context: 0 &dev->mutex rtnl_mutex net_rwsem irq_context: 0 &dev->mutex rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 &dev->mutex rtnl_mutex &x->wait#9 irq_context: 0 &dev->mutex rtnl_mutex &k->list_lock irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex pool_lock#2 irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex lock irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 &dev->mutex rtnl_mutex &____s->seqcount irq_context: 0 &dev->mutex rtnl_mutex lock irq_context: 0 &dev->mutex rtnl_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex rtnl_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex rtnl_mutex bus_type_sem irq_context: 0 &dev->mutex rtnl_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex rtnl_mutex &c->lock irq_context: 0 &dev->mutex rtnl_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex rtnl_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex rtnl_mutex &dev->power.lock irq_context: 0 &dev->mutex rtnl_mutex dpm_list_mtx irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 &dev->mutex rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex rtnl_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex rtnl_mutex &k->k_lock irq_context: 0 &dev->mutex rtnl_mutex subsys mutex#20 irq_context: 0 &dev->mutex rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 &dev->mutex rtnl_mutex &dir->lock#2 irq_context: 0 &dev->mutex rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex rtnl_mutex dev_hotplug_mutex irq_context: 0 &dev->mutex rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 &dev->mutex rtnl_mutex dev_base_lock irq_context: 0 &dev->mutex rtnl_mutex input_pool.lock irq_context: 0 &dev->mutex rtnl_mutex pcpu_alloc_mutex irq_context: 0 &dev->mutex rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 &dev->mutex rtnl_mutex 
batched_entropy_u32.lock irq_context: 0 &dev->mutex rtnl_mutex &tbl->lock irq_context: 0 &dev->mutex rtnl_mutex sysctl_lock irq_context: 0 &dev->mutex rtnl_mutex nl_table_lock irq_context: 0 &dev->mutex rtnl_mutex nl_table_wait.lock irq_context: 0 (wq_completion)gve irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) irq_context: 0 hnae3_common_lock irq_context: 0 subsys mutex#52 irq_context: 0 subsys mutex#52 &k->k_lock irq_context: 0 compressor_list_lock irq_context: 0 compressor_list_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem hwsim_netgroup_ida.xa_lock irq_context: 0 hwsim_radio_lock irq_context: 0 subsys mutex#53 irq_context: 0 subsys mutex#53 &k->k_lock irq_context: 0 deferred_probe_mutex irq_context: 0 rtnl_mutex param_lock irq_context: 0 rtnl_mutex param_lock rate_ctrl_mutex irq_context: 0 rtnl_mutex (console_sem).lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx irq_context: 0 rtnl_mutex &rdev->wiphy.mtx fs_reclaim irq_context: 0 rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &rdev->wiphy.mtx pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &k->list_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex &k->list_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex fs_reclaim irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock kernfs_idr_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex kobj_ns_type_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx bus_type_sem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx sysfs_symlink_target_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &c->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &____s->seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &dev->power.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx irq_context: 0 rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex irq_context: 0 rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex fs_reclaim irq_context: 0 rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex nl_table_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex nl_table_wait.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex 
&rdev->wiphy.mtx rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx running_helpers_waitq.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &k->k_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 &k->k_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx pin_fs_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx nl_table_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx reg_requests_lock irq_context: 0 rtnl_mutex &base->lock irq_context: 0 rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 rfkill_global_mutex irq_context: 0 rfkill_global_mutex fs_reclaim irq_context: 0 rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rfkill_global_mutex pool_lock#2 irq_context: 0 rfkill_global_mutex &k->list_lock irq_context: 0 rfkill_global_mutex lock irq_context: 0 rfkill_global_mutex lock kernfs_idr_lock irq_context: 0 rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rfkill_global_mutex bus_type_sem irq_context: 0 rfkill_global_mutex sysfs_symlink_target_lock irq_context: 0 rfkill_global_mutex &c->lock irq_context: 0 rfkill_global_mutex &____s->seqcount irq_context: 0 rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 rfkill_global_mutex &dev->power.lock irq_context: 0 rfkill_global_mutex dpm_list_mtx irq_context: 0 rfkill_global_mutex &rfkill->lock irq_context: 0 rfkill_global_mutex uevent_sock_mutex irq_context: 0 rfkill_global_mutex &obj_hash[i].lock irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rfkill_global_mutex running_helpers_waitq.lock irq_context: 0 rfkill_global_mutex &k->k_lock irq_context: 0 rfkill_global_mutex subsys mutex#40 
irq_context: 0 rfkill_global_mutex subsys mutex#40 &k->k_lock irq_context: 0 rfkill_global_mutex triggers_list_lock irq_context: 0 rfkill_global_mutex leds_list_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx stack_depot_init_mutex irq_context: 0 rtnl_mutex &rdev->wiphy.mtx pcpu_alloc_mutex irq_context: 0 rtnl_mutex &rdev->wiphy.mtx pcpu_alloc_mutex pcpu_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &xa->xa_lock#4 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx net_rwsem irq_context: 0 rtnl_mutex &rdev->wiphy.mtx net_rwsem &list->lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &x->wait#9 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 &k->k_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &dir->lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex irq_context: 0 rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &dev->power.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx dev_base_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx input_pool.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx batched_entropy_u32.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &tbl->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx sysctl_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &fq->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &local->iflist_mtx irq_context: 0 hwsim_radio_lock rcu_read_lock rhashtable_bucket irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex &c->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx gdp_mutex &____s->seqcount irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 rfkill_global_mutex &obj_hash[i].lock pool_lock irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rfkill_global_mutex &rq->__lock irq_context: 0 rfkill_global_mutex.wait_lock irq_context: 0 &dev->mutex crngs.lock irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 &dev->mutex cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 &dev->mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex rtnl_mutex subsys mutex#55 irq_context: 0 &dev->mutex rtnl_mutex subsys mutex#55 &k->k_lock irq_context: 0 &dev->mutex rtnl_mutex stack_depot_init_mutex irq_context: 0 &dev->mutex rtnl_mutex crngs.lock irq_context: 0 &dev->mutex rtnl_mutex &sdata->sec_mtx irq_context: 0 &dev->mutex rtnl_mutex &sdata->sec_mtx &sec->lock irq_context: 0 &dev->mutex rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex 
rtnl_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 &dev->mutex rtnl_mutex &local->iflist_mtx#2 irq_context: 0 &dev->mutex rtnl_mutex &local->iflist_mtx#2 &rq->__lock irq_context: 0 &dev->mutex rtnl_mutex &local->iflist_mtx#2 &cfs_rq->removed.lock irq_context: 0 &dev->mutex rtnl_mutex &local->iflist_mtx#2 &obj_hash[i].lock irq_context: 0 &dev->mutex rtnl_mutex &local->iflist_mtx#2 pool_lock#2 irq_context: 0 &dev->mutex hwsim_phys_lock irq_context: 0 &dev->mutex nl_table_lock irq_context: 0 &dev->mutex nl_table_wait.lock irq_context: 0 &dev->mutex rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 &dev->mutex rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex hwsim_phys_lock fs_reclaim irq_context: 0 &dev->mutex hwsim_phys_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex hwsim_phys_lock pool_lock#2 irq_context: 0 &dev->mutex hwsim_phys_lock &____s->seqcount irq_context: 0 xdomain_lock irq_context: 0 xdomain_lock fs_reclaim irq_context: 0 xdomain_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 xdomain_lock pool_lock#2 irq_context: 0 ioctl_mutex irq_context: 0 address_handler_list_lock irq_context: 0 card_mutex irq_context: 0 gdp_mutex &pcp->lock &zone->lock irq_context: 0 gdp_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 subsys mutex#56 irq_context: 0 subsys mutex#56 &k->k_lock irq_context: 0 &x->wait#18 irq_context: 0 &x->wait#18 &p->pi_lock irq_context: 0 &x->wait#18 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#18 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &txlock irq_context: 0 &txlock &list->lock#3 irq_context: 0 &txlock &txwq irq_context: 0 &iocq[i].lock irq_context: 0 &iocq[i].lock &ktiowq[i] irq_context: 0 &txwq irq_context: 0 &txwq &p->pi_lock irq_context: 0 &txwq &p->pi_lock &rq->__lock irq_context: 0 &txwq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh pool_lock#2 irq_context: hardirq &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 subsys mutex#57 irq_context: 0 subsys mutex#57 &k->k_lock irq_context: 0 usb_bus_idr_lock irq_context: 0 usb_bus_idr_lock (usb_notifier_list).rwsem irq_context: 0 table_lock irq_context: 0 table_lock &k->list_lock irq_context: 0 table_lock fs_reclaim irq_context: 0 table_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 table_lock pool_lock#2 irq_context: 0 table_lock lock irq_context: 0 table_lock lock kernfs_idr_lock irq_context: 0 table_lock &root->kernfs_rwsem irq_context: 0 table_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 table_lock &k->k_lock irq_context: 0 table_lock uevent_sock_mutex irq_context: 0 table_lock &obj_hash[i].lock irq_context: 0 table_lock rcu_read_lock &pool->lock irq_context: 0 table_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 table_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 table_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 table_lock running_helpers_waitq.lock irq_context: 0 table_lock (console_sem).lock irq_context: 0 table_lock console_lock console_srcu console_owner_lock irq_context: 0 table_lock console_lock console_srcu console_owner irq_context: 0 table_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 table_lock console_lock console_srcu 
console_owner console_owner_lock irq_context: 0 table_lock &c->lock irq_context: 0 table_lock &____s->seqcount irq_context: 0 table_lock &pcp->lock &zone->lock irq_context: 0 table_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 table_lock &rq->__lock irq_context: 0 table_lock batched_entropy_u8.lock irq_context: 0 table_lock kfence_freelist_lock irq_context: 0 table_lock &base->lock irq_context: 0 table_lock &base->lock &obj_hash[i].lock irq_context: 0 table_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 table_lock &obj_hash[i].lock pool_lock irq_context: softirq (&ipmi_timer) irq_context: softirq (&ipmi_timer) &ipmi_interfaces_srcu irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 table_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 table_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex devtree_lock irq_context: 0 &dev->mutex usb_bus_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock pool_lock#2 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem fs_reclaim irq_context: 0 &dev->mutex (usb_notifier_list).rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex (usb_notifier_list).rwsem pool_lock#2 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem pin_fs_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &x->wait#9 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &obj_hash[i].lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &k->list_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex &k->list_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex fs_reclaim irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex pool_lock#2 
irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem lock kernfs_idr_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem bus_type_sem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem sysfs_symlink_target_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &c->lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &____s->seqcount irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &dev->power.lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem dpm_list_mtx irq_context: 0 &dev->mutex (usb_notifier_list).rwsem req_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &p->pi_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &x->wait#11 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &rq->__lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem uevent_sock_mutex irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &pool->lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem running_helpers_waitq.lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &k->k_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem subsys mutex#57 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem subsys mutex#57 &k->k_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem mon_lock irq_context: 0 &dev->mutex usb_port_peer_mutex irq_context: 0 &dev->mutex device_state_lock irq_context: 0 &dev->mutex usb_bus_idr_lock mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock &bh->lock irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &rq->__lock irq_context: softirq &bh->lock irq_context: softirq lock#6 irq_context: softirq lock#6 kcov_remote_lock irq_context: softirq &x->wait#19 irq_context: 0 &dev->mutex usb_bus_idr_lock &x->wait#19 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->power.lock irq_context: 0 &dev->mutex usb_bus_idr_lock device_links_srcu 
irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->power.lock &dev->power.lock/1 irq_context: 0 &dev->mutex usb_bus_idr_lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock (console_sem).lock irq_context: 0 &dev->mutex usb_bus_idr_lock console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex usb_bus_idr_lock console_lock console_srcu console_owner irq_context: 0 &dev->mutex usb_bus_idr_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->mutex usb_bus_idr_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex usb_bus_idr_lock input_pool.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &k->list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock lock irq_context: 0 &dev->mutex usb_bus_idr_lock lock kernfs_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock bus_type_sem irq_context: 0 &dev->mutex usb_bus_idr_lock sysfs_symlink_target_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &k->k_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock dpm_list_mtx irq_context: 0 &dev->mutex usb_bus_idr_lock req_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &x->wait#11 irq_context: 0 &dev->mutex usb_bus_idr_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem lock irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem lock kernfs_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock uevent_sock_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &pool->lock irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock running_helpers_waitq.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->power.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &k->list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &k->k_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex device_links_srcu irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex fwnode_link_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex device_links_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex 
sysfs_symlink_target_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex set_config_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex devtree_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &x->wait#9 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &dev->power.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock &bh->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &x->wait#19 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex device_state_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex bus_type_sem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex dpm_list_mtx irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &(&priv->bus_notifier)->rwsem &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex uevent_sock_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 
&dev->mutex usb_bus_idr_lock &dev->mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &dev->power.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &k->list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &k->k_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &new_driver->dynids.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex device_links_srcu irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex fwnode_link_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex device_links_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &dev->devres_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex pinctrl_list_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex pinctrl_maps_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &dev->power.lock &dev->power.lock/1 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex (console_sem).lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex console_lock console_srcu console_owner irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &dum_hcd->dum->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock &bh->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock &p->pi_lock &rq->__lock 
irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &x->wait#19 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex hcd_root_hub_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &dum_hcd->dum->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex hcd_root_hub_lock &bh->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &x->wait#19 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &x->wait#9 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &k->list_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex bus_type_sem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex 
&dev->mutex usb_port_peer_mutex &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dpm_list_mtx irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &k->k_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx &dev->power.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx pm_qos_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex component_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex device_links_srcu irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock &dev->power.lock/1 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx fs_reclaim irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx lock kernfs_idr_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex 
&dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex deferred_probe_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex uevent_sock_mutex irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex probe_waitqueue.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex subsys mutex#58 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &x->wait#9 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex (usb_notifier_list).rwsem irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex deferred_probe_mutex irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex probe_waitqueue.lock irq_context: 0 &dev->mutex usb_bus_idr_lock subsys mutex#58 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &x->wait#9 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &____s->seqcount irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex fs_reclaim irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex 
&hub->status_mutex &dum_hcd->dum->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &bh->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &x->wait#19 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &x->wait#19 &p->pi_lock irq_context: softirq &x->wait#19 &p->pi_lock &rq->__lock irq_context: softirq &x->wait#19 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex (&timer.timer) irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex hcd_root_hub_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) lock#6 irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex 
&dev->power.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->power.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->power.lock &dev->power.wait_queue irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->power.lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->power.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &hub->irq_urb_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) (&hub->irq_urb_retry) irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &base->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_urb_unlink_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_root_hub_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_root_hub_lock &bh->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_root_hub_lock &p->pi_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) usb_kill_urb_queue.lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &rq->__lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &rq->__lock &cfs_rq->removed.lock irq_context: softirq usb_kill_urb_queue.lock irq_context: softirq usb_kill_urb_queue.lock &p->pi_lock irq_context: softirq usb_kill_urb_queue.lock &p->pi_lock &rq->__lock irq_context: softirq usb_kill_urb_queue.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) (work_completion)(&hub->tt.clear_work) irq_context: 0 &dev->mutex udc_lock irq_context: 0 &dev->mutex subsys mutex#59 irq_context: 0 &dev->mutex subsys mutex#59 &k->k_lock irq_context: 0 &dev->mutex gadget_id_numbers.xa_lock irq_context: 0 (wq_completion)events (work_completion)(&gadget->work) irq_context: 0 (wq_completion)events (work_completion)(&gadget->work) &root->kernfs_rwsem irq_context: 0 (wq_completion)events (work_completion)(&gadget->work) kernfs_notify_lock irq_context: 0 (wq_completion)events (work_completion)(&gadget->work) kernfs_notify_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&gadget->work) kernfs_notify_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events kernfs_notify_work irq_context: 0 (wq_completion)events kernfs_notify_work kernfs_notify_lock irq_context: 0 (wq_completion)events kernfs_notify_work &root->kernfs_supers_rwsem irq_context: 0 &dev->mutex subsys mutex#60 irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dum_hcd->dum->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) device_state_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_urb_list_lock irq_context: 0 func_lock irq_context: 0 
g_tf_lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &vhci_hcd->vhci->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &vhci_hcd->vhci->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx &____s->seqcount irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &lock->wait_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &lock->wait_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &vhci_hcd->vhci->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq &x->wait#19 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &vhci_hcd->vhci->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &vhci_hcd->vhci->lock irq_context: 0 &type->i_mutex_dir_key#2 &c->lock irq_context: 0 &type->i_mutex_dir_key#2 &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &pcp->lock &zone->lock 
irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) pool_lock#2 irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) fs_reclaim irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &x->wait#19 irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) (&timer.timer) irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex &hub->status_mutex &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock lock kernfs_idr_lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &pcp->lock &zone->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex lock kernfs_idr_lock &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &c->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &____s->seqcount irq_context: softirq lib/debugobjects.c:101 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 reading_mutex &rq->__lock irq_context: 0 reading_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &x->wait#14 &p->pi_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_sysfs_mtx lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem batched_entropy_u8.lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem kfence_freelist_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &base->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &x->wait#19 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex (&timer.timer) irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex 
rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &pcp->lock &zone->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &(&priv->bus_notifier)->rwsem lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex usb_bus_idr_lock quarantine_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &base->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock (&timer.timer) irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex hcd->bandwidth_mutex &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &base->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex (&timer.timer) irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &base->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex (&timer.timer) irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex batched_entropy_u8.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex kfence_freelist_lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &dev->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: hardirq &x->wait#5 irq_context: 0 &dev->mutex usb_bus_idr_lock hcd_root_hub_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &pcp->lock &zone->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &lock->wait_lock irq_context: 0 (wq_completion)usb_hub_wq 
(work_completion)(&hub->events) &dev->mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &rq->__lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &lock->wait_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq drivers/char/random.c:251 rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock lock kernfs_idr_lock &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &rq->__lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &pcp->lock &zone->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &base->lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex usb_bus_idr_lock batched_entropy_u8.lock irq_context: 0 &dev->mutex usb_bus_idr_lock batched_entropy_u8.lock crngs.lock irq_context: 0 &dev->mutex usb_bus_idr_lock kfence_freelist_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &simple_offset_xa_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &simple_offset_xa_lock &____s->seqcount irq_context: softirq usb_kill_urb_queue.lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &pcp->lock &zone->lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx &pcp->lock &zone->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex usb_port_peer_mutex dev_pm_qos_mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex &dev->mutex &hub->status_mutex &c->lock irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex usb_bus_idr_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 &dev->mutex usb_bus_idr_lock &dev->mutex lock kernfs_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) hcd_root_hub_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex &c->lock irq_context: 0 &dev->mutex (usb_notifier_list).rwsem gdp_mutex &____s->seqcount irq_context: 0 &dev->mutex &dev->mutex probe_waitqueue.lock irq_context: 0 &dev->mutex &dev->mutex &lock->wait_lock irq_context: 0 &dev->mutex &dev->mutex &rq->__lock irq_context: 0 &dev->mutex &dev->mutex 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &dev->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &dev->mutex rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex &dev->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&hub->init_work)->work) &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &lock->wait_lock irq_context: 0 &dev->mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &dev->mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 input_ida.xa_lock irq_context: 0 input_ida.xa_lock pool_lock#2 irq_context: 0 subsys mutex#31 irq_context: 0 subsys mutex#31 &k->k_lock irq_context: 0 input_mutex input_ida.xa_lock irq_context: 0 input_mutex fs_reclaim irq_context: 0 input_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 input_mutex pool_lock#2 irq_context: 0 input_mutex &x->wait#9 irq_context: 0 input_mutex &obj_hash[i].lock irq_context: 0 input_mutex &dev->mutex#2 irq_context: 0 input_mutex chrdevs_lock irq_context: 0 input_mutex &k->list_lock irq_context: 0 input_mutex &c->lock irq_context: 0 input_mutex &____s->seqcount irq_context: 0 input_mutex lock irq_context: 0 input_mutex lock kernfs_idr_lock irq_context: 0 input_mutex &root->kernfs_rwsem irq_context: 0 input_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 input_mutex bus_type_sem irq_context: 0 input_mutex sysfs_symlink_target_lock irq_context: 0 input_mutex &root->kernfs_rwsem irq_context: 0 input_mutex &dev->power.lock irq_context: 0 input_mutex dpm_list_mtx irq_context: 0 input_mutex req_lock irq_context: 0 input_mutex &p->pi_lock irq_context: 0 input_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 input_mutex &x->wait#11 irq_context: 0 input_mutex &rq->__lock irq_context: 0 input_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 input_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 input_mutex uevent_sock_mutex irq_context: 0 input_mutex &obj_hash[i].lock pool_lock irq_context: 0 input_mutex rcu_read_lock &pool->lock irq_context: 0 input_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 input_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 input_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 input_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 input_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 input_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 input_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 input_mutex running_helpers_waitq.lock irq_context: 0 input_mutex &k->k_lock irq_context: 0 input_mutex subsys mutex#31 irq_context: 0 input_mutex subsys mutex#31 &k->k_lock irq_context: 0 input_mutex &p->pi_lock &rq->__lock irq_context: 0 input_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 input_mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 serio_event_lock irq_context: 0 serio_event_lock pool_lock#2 irq_context: 0 serio_event_lock rcu_read_lock &pool->lock irq_context: 0 serio_event_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 serio_event_lock 
rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 serio_event_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 serio_event_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &new_driver->dynids.lock irq_context: 0 (wq_completion)events_long irq_context: 0 (wq_completion)events_long serio_event_work irq_context: 0 (wq_completion)events_long serio_event_work serio_mutex irq_context: 0 (wq_completion)events_long serio_event_work serio_mutex serio_event_lock irq_context: 0 (wq_completion)events_long serio_event_work serio_mutex &k->list_lock irq_context: 0 (wq_completion)events_long serio_event_work serio_mutex &k->k_lock irq_context: 0 (wq_completion)events_long serio_event_work serio_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_long serio_event_work serio_mutex pool_lock#2 irq_context: 0 &dev->mutex (efi_runtime_lock).lock irq_context: 0 &dev->mutex &x->wait#12 irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) &x->wait#12 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) &x->wait#12 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)efi_rts_wq (work_completion)(&efi_rts_work.work) &x->wait#12 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex rtc_ida.xa_lock irq_context: 0 &dev->mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 &dev->mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &dev->mutex &rtc->ops_lock irq_context: 0 &dev->mutex &rtc->ops_lock (efi_runtime_lock).lock irq_context: 0 &dev->mutex &rtc->ops_lock &obj_hash[i].lock irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &pool->lock irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &rtc->ops_lock &rq->__lock irq_context: 0 &dev->mutex &rtc->ops_lock &x->wait#12 irq_context: 0 &dev->mutex &rtc->ops_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex &rtc->ops_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex chrdevs_lock irq_context: 0 &dev->mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 &dev->mutex req_lock irq_context: 0 &dev->mutex &x->wait#11 irq_context: 0 &dev->mutex subsys mutex#27 irq_context: 0 &dev->mutex subsys mutex#27 &k->k_lock irq_context: 0 &dev->mutex subsys mutex#27 fs_reclaim irq_context: 0 &dev->mutex subsys mutex#27 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex subsys mutex#27 pool_lock#2 irq_context: 0 &dev->mutex subsys mutex#27 &x->wait#9 irq_context: 0 &dev->mutex subsys mutex#27 &obj_hash[i].lock irq_context: 0 &dev->mutex subsys mutex#27 platform_devid_ida.xa_lock irq_context: 0 &dev->mutex subsys mutex#27 &k->list_lock irq_context: 0 &dev->mutex subsys mutex#27 lock irq_context: 0 &dev->mutex subsys mutex#27 lock kernfs_idr_lock irq_context: 0 &dev->mutex subsys mutex#27 &root->kernfs_rwsem irq_context: 0 &dev->mutex subsys mutex#27 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 
&dev->mutex subsys mutex#27 bus_type_sem irq_context: 0 &dev->mutex subsys mutex#27 sysfs_symlink_target_lock irq_context: 0 &dev->mutex subsys mutex#27 &c->lock irq_context: 0 &dev->mutex subsys mutex#27 &____s->seqcount irq_context: 0 &dev->mutex subsys mutex#27 &root->kernfs_rwsem irq_context: 0 &dev->mutex subsys mutex#27 &dev->power.lock irq_context: 0 &dev->mutex subsys mutex#27 dpm_list_mtx irq_context: 0 &dev->mutex subsys mutex#27 &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex subsys mutex#27 &(&priv->bus_notifier)->rwsem iommu_probe_device_lock irq_context: 0 &dev->mutex subsys mutex#27 &(&priv->bus_notifier)->rwsem iommu_probe_device_lock iommu_device_lock irq_context: 0 &dev->mutex subsys mutex#27 uevent_sock_mutex irq_context: 0 &dev->mutex subsys mutex#27 rcu_read_lock &pool->lock irq_context: 0 &dev->mutex subsys mutex#27 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex subsys mutex#27 rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex subsys mutex#27 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex subsys mutex#27 running_helpers_waitq.lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &dev->power.lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &k->list_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &k->k_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex device_links_srcu irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex fwnode_link_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex device_links_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex fs_reclaim irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex pool_lock#2 irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &dev->devres_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex pinctrl_list_mutex irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex pinctrl_maps_mutex irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &obj_hash[i].lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex deferred_probe_mutex irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex uevent_sock_mutex irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex subsys mutex#27 &dev->mutex probe_waitqueue.lock irq_context: 0 &dev->mutex subsys mutex#27 subsys mutex#4 irq_context: 0 &dev->mutex subsys mutex#27 rcu_read_lock &pool->lock 
&p->pi_lock &rq->__lock irq_context: 0 &dev->mutex subsys mutex#27 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex subsys mutex#27 &rq->__lock irq_context: 0 &dev->mutex subsys mutex#27 wakeup_ida.xa_lock irq_context: 0 &dev->mutex subsys mutex#27 &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex &k->list_lock irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex fs_reclaim irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex pool_lock#2 irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex lock irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex subsys mutex#27 gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex subsys mutex#27 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex subsys mutex#27 subsys mutex#13 irq_context: 0 &dev->mutex subsys mutex#27 subsys mutex#13 &k->k_lock irq_context: 0 &dev->mutex subsys mutex#27 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &dev->mutex subsys mutex#27 fill_pool_map-wait-type-override pool_lock irq_context: 0 &dev->mutex subsys mutex#27 events_lock irq_context: 0 &dev->mutex subsys mutex#27 rtcdev_lock irq_context: 0 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 fill_pool_map-wait-type-override &obj_hash[i].lock pool_lock irq_context: 0 g_smscore_deviceslock irq_context: 0 g_smscore_deviceslock fs_reclaim irq_context: 0 g_smscore_deviceslock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 g_smscore_deviceslock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock quarantine_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 &obj_hash[i].lock pool_lock irq_context: 0 cx231xx_devlist_mutex irq_context: 0 em28xx_devlist_mutex irq_context: 0 pvr2_context_sync_data.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) batched_entropy_u8.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) kfence_freelist_lock irq_context: 0 &dev->mutex core_lock irq_context: 0 &dev->mutex core_lock fs_reclaim irq_context: 0 &dev->mutex core_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex core_lock pool_lock#2 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem fs_reclaim irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &c->lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &____s->seqcount irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem pool_lock#2 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem 
i2c_dev_list_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &x->wait#9 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &obj_hash[i].lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem chrdevs_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &k->list_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex &k->list_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex fs_reclaim irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex pool_lock#2 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem lock kernfs_idr_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem bus_type_sem irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem sysfs_symlink_target_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &dev->power.lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem dpm_list_mtx irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem req_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &p->pi_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &x->wait#11 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &rq->__lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem uevent_sock_mutex irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem rcu_read_lock &pool->lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem running_helpers_waitq.lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem &k->k_lock irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem subsys mutex#61 irq_context: 0 &dev->mutex &(&priv->bus_notifier)->rwsem subsys mutex#61 &k->k_lock irq_context: 0 &dev->mutex subsys mutex#62 irq_context: 0 &dev->mutex pin_fs_lock irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start 
irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 &dev->mutex core_lock &k->list_lock irq_context: 0 &dev->mutex core_lock &k->k_lock irq_context: 0 &dev->mutex dvbdev_register_lock irq_context: 0 &dev->mutex dvbdev_register_lock (console_sem).lock irq_context: 0 &dev->mutex dvbdev_register_lock console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex dvbdev_register_lock console_lock console_srcu console_owner irq_context: 0 &dev->mutex dvbdev_register_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->mutex dvbdev_register_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex (kmod_concurrent_max).lock irq_context: 0 &dev->mutex &x->wait#17 irq_context: 0 &dev->mutex &dev->mutex &dev->power.lock &dev->power.lock/1 irq_context: 0 &dev->mutex &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 &dev->mutex &dev->mutex device_links_srcu irq_context: 0 &dev->mutex &dev->mutex fwnode_link_lock irq_context: 0 &dev->mutex &dev->mutex device_links_lock irq_context: 0 &dev->mutex &dev->mutex fs_reclaim irq_context: 0 &dev->mutex &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &dev->mutex pool_lock#2 irq_context: 0 &dev->mutex &dev->mutex &dev->devres_lock irq_context: 0 &dev->mutex &dev->mutex pinctrl_list_mutex irq_context: 0 &dev->mutex &dev->mutex pinctrl_maps_mutex irq_context: 0 &dev->mutex &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 &dev->mutex &dev->mutex &obj_hash[i].lock irq_context: 0 &dev->mutex &dev->mutex &(&priv->bus_notifier)->rwsem irq_context: 0 &dev->mutex &dev->mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex &dev->mutex lock irq_context: 0 &dev->mutex &dev->mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex &dev->mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &dev->mutex deferred_probe_mutex irq_context: 0 &dev->mutex &dev->mutex uevent_sock_mutex irq_context: 0 &dev->mutex &dev->mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex &dev->mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex &dev->mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex &dev->mutex running_helpers_waitq.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock &sighand->siglock &sig->wait_chldexit &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex frontend_mutex irq_context: 0 &dev->mutex frontend_mutex fs_reclaim irq_context: 0 &dev->mutex frontend_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex 
frontend_mutex pool_lock#2 irq_context: 0 &dev->mutex frontend_mutex (console_sem).lock irq_context: 0 &dev->mutex frontend_mutex console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex frontend_mutex console_lock console_srcu console_owner irq_context: 0 &dev->mutex frontend_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->mutex frontend_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock fs_reclaim irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock pool_lock#2 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock minor_rwsem irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &xa->xa_lock#13 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &mdev->graph_mutex irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &mdev->graph_mutex fs_reclaim irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &mdev->graph_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &mdev->graph_mutex pool_lock#2 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock (console_sem).lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock console_lock console_srcu console_owner_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock console_lock console_srcu console_owner irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &x->wait#9 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &obj_hash[i].lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &k->list_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex &k->list_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex fs_reclaim irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex &c->lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex &____s->seqcount irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex pool_lock#2 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock lock kernfs_idr_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock bus_type_sem irq_context: 0 &dev->mutex 
frontend_mutex dvbdev_register_lock sysfs_symlink_target_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &c->lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &____s->seqcount irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &dev->power.lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock dpm_list_mtx irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock req_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &p->pi_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &x->wait#11 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &rq->__lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock uevent_sock_mutex irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock rcu_read_lock &pool->lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock running_helpers_waitq.lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock &k->k_lock irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock subsys mutex#63 irq_context: 0 &dev->mutex frontend_mutex dvbdev_register_lock subsys mutex#63 &k->k_lock irq_context: 0 &dev->mutex &dmxdev->lock irq_context: 0 &dev->mutex dvbdev_register_lock fs_reclaim irq_context: 0 &dev->mutex dvbdev_register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex dvbdev_register_lock pool_lock#2 irq_context: 0 &dev->mutex dvbdev_register_lock &c->lock irq_context: 0 &dev->mutex dvbdev_register_lock &____s->seqcount irq_context: 0 &dev->mutex dvbdev_register_lock minor_rwsem irq_context: 0 &dev->mutex dvbdev_register_lock &xa->xa_lock#13 irq_context: 0 &dev->mutex dvbdev_register_lock &mdev->graph_mutex irq_context: 0 &dev->mutex dvbdev_register_lock &xa->xa_lock#13 pool_lock#2 irq_context: 0 &dev->mutex dvbdev_register_lock &mdev->graph_mutex fs_reclaim irq_context: 0 &dev->mutex dvbdev_register_lock &mdev->graph_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex dvbdev_register_lock &mdev->graph_mutex pool_lock#2 irq_context: 0 &dev->mutex dvbdev_register_lock &mdev->graph_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex dvbdev_register_lock &pcp->lock &zone->lock irq_context: 0 &dev->mutex dvbdev_register_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex dvbdev_register_lock &x->wait#9 irq_context: 0 &dev->mutex dvbdev_register_lock &obj_hash[i].lock irq_context: 0 &dev->mutex dvbdev_register_lock &k->list_lock irq_context: 0 &dev->mutex dvbdev_register_lock gdp_mutex irq_context: 0 &dev->mutex dvbdev_register_lock gdp_mutex &k->list_lock irq_context: 0 &dev->mutex dvbdev_register_lock lock irq_context: 0 &dev->mutex dvbdev_register_lock lock kernfs_idr_lock irq_context: 0 &dev->mutex dvbdev_register_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex dvbdev_register_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex dvbdev_register_lock bus_type_sem irq_context: 0 &dev->mutex dvbdev_register_lock 
sysfs_symlink_target_lock irq_context: 0 &dev->mutex dvbdev_register_lock &root->kernfs_rwsem irq_context: 0 &dev->mutex dvbdev_register_lock &dev->power.lock irq_context: 0 &dev->mutex dvbdev_register_lock dpm_list_mtx irq_context: 0 &dev->mutex dvbdev_register_lock req_lock irq_context: 0 &dev->mutex dvbdev_register_lock &p->pi_lock irq_context: 0 &dev->mutex dvbdev_register_lock &x->wait#11 irq_context: 0 &dev->mutex dvbdev_register_lock &rq->__lock irq_context: 0 &dev->mutex dvbdev_register_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex dvbdev_register_lock uevent_sock_mutex irq_context: 0 &dev->mutex dvbdev_register_lock rcu_read_lock &pool->lock irq_context: 0 &dev->mutex dvbdev_register_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex dvbdev_register_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex dvbdev_register_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex dvbdev_register_lock running_helpers_waitq.lock irq_context: 0 &dev->mutex dvbdev_register_lock &k->k_lock irq_context: 0 &dev->mutex dvbdev_register_lock subsys mutex#63 irq_context: 0 &dev->mutex dvbdev_register_lock subsys mutex#63 &k->k_lock irq_context: 0 &dev->mutex &dvbdemux->mutex irq_context: 0 &dev->mutex media_devnode_lock irq_context: 0 &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex subsys mutex#64 irq_context: 0 &dev->mutex videodev_lock irq_context: 0 &dev->mutex subsys mutex#65 irq_context: 0 &dev->mutex subsys mutex#65 &k->k_lock irq_context: 0 &dev->mutex &xa->xa_lock#13 irq_context: 0 &dev->mutex &mdev->graph_mutex irq_context: 0 &dev->mutex &mdev->graph_mutex fs_reclaim irq_context: 0 &dev->mutex &mdev->graph_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &mdev->graph_mutex pool_lock#2 irq_context: 0 &dev->mutex vimc_sensor:396:(&vsensor->hdl)->_lock irq_context: 0 &dev->mutex &v4l2_dev->lock irq_context: 0 &dev->mutex vimc_debayer:581:(&vdebayer->hdl)->_lock irq_context: 0 &dev->mutex vimc_lens:61:(&vlens->hdl)->_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &rq->__lock irq_context: 0 &dev->mutex lock kernfs_idr_lock &c->lock irq_context: 0 &dev->mutex tk_core.seq.seqcount irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1608:(hdl_user_vid)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1616:(hdl_loop_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1618:(hdl_fb)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1622:(hdl_vid_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1625:(hdl_vbi_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1627:(hdl_vbi_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1630:(hdl_radio_rx)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1632:(hdl_radio_tx)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1634:(hdl_sdr_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1636:(hdl_meta_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1638:(hdl_meta_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1640:(hdl_tch_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex 
vivid_ctrls:1606:(hdl_user_gen)->_lock fs_reclaim irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock pool_lock#2 irq_context: 0 &dev->mutex vivid_ctrls:1608:(hdl_user_vid)->_lock vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1608:(hdl_user_vid)->_lock fs_reclaim irq_context: 0 &dev->mutex vivid_ctrls:1608:(hdl_user_vid)->_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex vivid_ctrls:1608:(hdl_user_vid)->_lock pool_lock#2 irq_context: 0 &dev->mutex vivid_ctrls:1608:(hdl_user_vid)->_lock &c->lock irq_context: 0 &dev->mutex vivid_ctrls:1608:(hdl_user_vid)->_lock &____s->seqcount irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock fs_reclaim irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock pool_lock#2 irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock fs_reclaim irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock pool_lock#2 irq_context: 0 &dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock fs_reclaim irq_context: 0 &dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock pool_lock#2 irq_context: 0 &dev->mutex vivid_ctrls:1616:(hdl_loop_cap)->_lock vivid_ctrls:1620:(hdl_vid_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1616:(hdl_loop_cap)->_lock fs_reclaim irq_context: 0 &dev->mutex vivid_ctrls:1616:(hdl_loop_cap)->_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex vivid_ctrls:1616:(hdl_loop_cap)->_lock pool_lock#2 irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1622:(hdl_vid_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock &c->lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock &____s->seqcount irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock vivid_ctrls:1622:(hdl_vid_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1622:(hdl_vid_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1625:(hdl_vbi_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1625:(hdl_vbi_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1614:(hdl_sdtv_cap)->_lock vivid_ctrls:1625:(hdl_vbi_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1616:(hdl_loop_cap)->_lock vivid_ctrls:1625:(hdl_vbi_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1627:(hdl_vbi_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1627:(hdl_vbi_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1630:(hdl_radio_rx)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock 
vivid_ctrls:1630:(hdl_radio_rx)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1632:(hdl_radio_tx)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1610:(hdl_user_aud)->_lock vivid_ctrls:1632:(hdl_radio_tx)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1634:(hdl_sdr_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1634:(hdl_sdr_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1636:(hdl_meta_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1636:(hdl_meta_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1638:(hdl_meta_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1638:(hdl_meta_out)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock vivid_ctrls:1640:(hdl_tch_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock vivid_ctrls:1640:(hdl_tch_cap)->_lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock &c->lock irq_context: 0 &dev->mutex vivid_ctrls:1612:(hdl_streaming)->_lock &____s->seqcount irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock &zone->lock irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex vivid_ctrls:1606:(hdl_user_gen)->_lock &obj_hash[i].lock irq_context: 0 &adap->kthread_waitq irq_context: 0 &dev->cec_xfers_slock irq_context: 0 &dev->kthread_waitq_cec irq_context: 0 &dev->mutex cec_devnode_lock irq_context: 0 &dev->mutex subsys mutex#66 irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 &dev->mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 &dev->mutex &adap->lock irq_context: 0 &dev->mutex &adap->lock tk_core.seq.seqcount irq_context: 0 &dev->mutex &adap->lock &adap->devnode.lock_fhs irq_context: 0 ptp_clocks_map.xa_lock irq_context: 0 subsys mutex#67 irq_context: 0 subsys mutex#67 &k->k_lock irq_context: 0 pers_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock pool_lock#2 irq_context: 0 _lock irq_context: 0 dm_bufio_clients_lock irq_context: 0 _ps_lock irq_context: 0 _lock#2 irq_context: 0 _lock#3 irq_context: 0 register_lock#2 irq_context: 0 subsys mutex#68 irq_context: 0 subsys mutex#68 &k->k_lock irq_context: 0 bp_lock irq_context: 0 bp_lock irq_context: 0 subsys mutex#69 irq_context: 0 subsys mutex#69 &k->k_lock irq_context: 0 free_vmap_area_lock &obj_hash[i].lock irq_context: 0 free_vmap_area_lock pool_lock#2 irq_context: 0 lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 lock kernfs_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&dsp_spl_tl) irq_context: softirq (&dsp_spl_tl) dsp_lock irq_context: softirq (&dsp_spl_tl) dsp_lock iclock_lock irq_context: softirq (&dsp_spl_tl) dsp_lock iclock_lock tk_core.seq.seqcount irq_context: softirq (&dsp_spl_tl) dsp_lock &obj_hash[i].lock irq_context: softirq (&dsp_spl_tl) dsp_lock &base->lock irq_context: softirq (&dsp_spl_tl) dsp_lock 
&base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex lock#7 irq_context: 0 iscsi_transport_lock irq_context: 0 subsys mutex#70 irq_context: 0 subsys mutex#70 &k->k_lock irq_context: 0 link_ops_rwsem irq_context: 0 &tx_task->waiting irq_context: 0 disable_lock irq_context: 0 disable_lock fs_reclaim irq_context: 0 disable_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 disable_lock pool_lock#2 irq_context: 0 disable_lock &x->wait#9 irq_context: 0 disable_lock &obj_hash[i].lock irq_context: 0 disable_lock &c->lock irq_context: 0 disable_lock &pcp->lock &zone->lock irq_context: 0 disable_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 disable_lock &____s->seqcount irq_context: 0 disable_lock &k->list_lock irq_context: 0 disable_lock lock irq_context: 0 disable_lock lock kernfs_idr_lock irq_context: 0 disable_lock &root->kernfs_rwsem irq_context: 0 disable_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 disable_lock bus_type_sem irq_context: 0 disable_lock sysfs_symlink_target_lock irq_context: 0 disable_lock &k->k_lock irq_context: 0 disable_lock &root->kernfs_rwsem irq_context: 0 disable_lock &dev->power.lock irq_context: 0 disable_lock dpm_list_mtx irq_context: 0 disable_lock &(&priv->bus_notifier)->rwsem irq_context: 0 disable_lock &(&priv->bus_notifier)->rwsem iommu_probe_device_lock irq_context: 0 disable_lock &(&priv->bus_notifier)->rwsem iommu_probe_device_lock iommu_device_lock irq_context: 0 disable_lock uevent_sock_mutex irq_context: 0 disable_lock rcu_read_lock &pool->lock irq_context: 0 disable_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 disable_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 disable_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 disable_lock running_helpers_waitq.lock irq_context: 0 disable_lock &dev->mutex &dev->power.lock irq_context: 0 disable_lock &dev->mutex &k->list_lock irq_context: 0 disable_lock &dev->mutex &k->k_lock irq_context: 0 disable_lock subsys mutex#4 irq_context: 0 protocol_lock irq_context: 0 protocol_lock &____s->seqcount irq_context: 0 protocol_lock pool_lock#2 irq_context: 0 psinfo_lock irq_context: 0 psinfo_lock fs_reclaim irq_context: 0 psinfo_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 psinfo_lock pool_lock#2 irq_context: 0 psinfo_lock free_vmap_area_lock irq_context: 0 psinfo_lock vmap_area_lock irq_context: 0 psinfo_lock &____s->seqcount irq_context: 0 psinfo_lock init_mm.page_table_lock irq_context: 0 psinfo_lock &pcp->lock &zone->lock irq_context: 0 psinfo_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 psinfo_lock (console_sem).lock irq_context: 0 psinfo_lock console_lock console_srcu console_owner_lock irq_context: 0 psinfo_lock console_lock console_srcu console_owner irq_context: 0 psinfo_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 psinfo_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 psinfo_lock &rq->__lock irq_context: 0 psinfo_lock pstore_sb_lock irq_context: 0 psinfo_lock dump_list_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 vsock_register_mutex irq_context: 0 comedi_drivers_list_lock irq_context: 0 &sb->s_type->i_mutex_key &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key &obj_hash[i].lock irq_context: 0 &domain->mutex irq_context: 0 &domain->mutex sparse_irq_lock irq_context: 0 
&domain->mutex sparse_irq_lock fs_reclaim irq_context: 0 &domain->mutex sparse_irq_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &domain->mutex sparse_irq_lock &c->lock irq_context: 0 &domain->mutex sparse_irq_lock &____s->seqcount irq_context: 0 &domain->mutex sparse_irq_lock pool_lock#2 irq_context: 0 &domain->mutex sparse_irq_lock pcpu_alloc_mutex irq_context: 0 &domain->mutex sparse_irq_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 &domain->mutex sparse_irq_lock &obj_hash[i].lock irq_context: 0 &domain->mutex sparse_irq_lock lock irq_context: 0 &domain->mutex sparse_irq_lock lock kernfs_idr_lock irq_context: 0 &domain->mutex sparse_irq_lock &root->kernfs_rwsem irq_context: 0 &domain->mutex sparse_irq_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &domain->mutex sparse_irq_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 &domain->mutex fs_reclaim irq_context: 0 &domain->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &domain->mutex pool_lock#2 irq_context: 0 &domain->mutex &irq_desc_lock_class irq_context: 0 &desc->request_mutex irq_context: 0 &desc->request_mutex &irq_desc_lock_class irq_context: 0 &desc->request_mutex &irq_desc_lock_class irq_controller_lock irq_context: 0 cscfg_mutex irq_context: 0 cscfg_mutex fs_reclaim irq_context: 0 cscfg_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cscfg_mutex pool_lock#2 irq_context: 0 cscfg_mutex &x->wait#9 irq_context: 0 cscfg_mutex &obj_hash[i].lock irq_context: 0 cscfg_mutex &k->list_lock irq_context: 0 cscfg_mutex lock irq_context: 0 cscfg_mutex lock kernfs_idr_lock irq_context: 0 cscfg_mutex &root->kernfs_rwsem irq_context: 0 cscfg_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 cscfg_mutex bus_type_sem irq_context: 0 cscfg_mutex &root->kernfs_rwsem irq_context: 0 cscfg_mutex &dev->power.lock irq_context: 0 cscfg_mutex dpm_list_mtx irq_context: 0 fs_reclaim icc_bw_lock irq_context: 0 subsys mutex#71 irq_context: 0 subsys mutex#71 &k->k_lock irq_context: 0 snd_ctl_layer_rwsem irq_context: 0 snd_card_mutex irq_context: 0 snd_ioctl_rwsem irq_context: 0 strings irq_context: 0 strings fs_reclaim irq_context: 0 strings fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 strings pool_lock#2 irq_context: 0 register_mutex irq_context: 0 sound_mutex irq_context: 0 sound_mutex fs_reclaim irq_context: 0 sound_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sound_mutex pool_lock#2 irq_context: 0 sound_mutex &k->list_lock irq_context: 0 sound_mutex gdp_mutex irq_context: 0 sound_mutex gdp_mutex &k->list_lock irq_context: 0 sound_mutex lock irq_context: 0 sound_mutex lock kernfs_idr_lock irq_context: 0 sound_mutex &root->kernfs_rwsem irq_context: 0 sound_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sound_mutex bus_type_sem irq_context: 0 sound_mutex sysfs_symlink_target_lock irq_context: 0 sound_mutex &root->kernfs_rwsem irq_context: 0 sound_mutex &dev->power.lock irq_context: 0 sound_mutex dpm_list_mtx irq_context: 0 sound_mutex req_lock irq_context: 0 sound_mutex &p->pi_lock irq_context: 0 sound_mutex &p->pi_lock &rq->__lock irq_context: 0 sound_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sound_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sound_mutex &rq->__lock irq_context: 0 sound_mutex &x->wait#11 irq_context: 0 sound_mutex &obj_hash[i].lock irq_context: 0 sound_mutex &c->lock irq_context: 0 sound_mutex &____s->seqcount irq_context: 0 
sound_mutex uevent_sock_mutex irq_context: 0 sound_mutex rcu_read_lock &pool->lock irq_context: 0 sound_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sound_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 sound_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sound_mutex running_helpers_waitq.lock irq_context: 0 sound_mutex subsys mutex#71 irq_context: 0 sound_mutex subsys mutex#71 &k->k_lock irq_context: 0 register_mutex#2 irq_context: 0 register_mutex#3 irq_context: 0 register_mutex#3 fs_reclaim irq_context: 0 register_mutex#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 register_mutex#3 pool_lock#2 irq_context: 0 register_mutex#3 sound_mutex irq_context: 0 register_mutex#3 sound_mutex fs_reclaim irq_context: 0 register_mutex#3 sound_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 register_mutex#3 sound_mutex pool_lock#2 irq_context: 0 register_mutex#3 sound_mutex &k->list_lock irq_context: 0 register_mutex#3 sound_mutex gdp_mutex irq_context: 0 register_mutex#3 sound_mutex gdp_mutex &k->list_lock irq_context: 0 register_mutex#3 sound_mutex &c->lock irq_context: 0 register_mutex#3 sound_mutex &____s->seqcount irq_context: 0 register_mutex#3 sound_mutex lock irq_context: 0 register_mutex#3 sound_mutex lock kernfs_idr_lock irq_context: 0 register_mutex#3 sound_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 register_mutex#3 sound_mutex &root->kernfs_rwsem irq_context: 0 register_mutex#3 sound_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 register_mutex#3 sound_mutex bus_type_sem irq_context: 0 register_mutex#3 sound_mutex sysfs_symlink_target_lock irq_context: 0 register_mutex#3 sound_mutex &root->kernfs_rwsem irq_context: 0 register_mutex#3 sound_mutex &dev->power.lock irq_context: 0 register_mutex#3 sound_mutex dpm_list_mtx irq_context: 0 register_mutex#3 sound_mutex req_lock irq_context: 0 register_mutex#3 sound_mutex &p->pi_lock irq_context: 0 register_mutex#3 sound_mutex &p->pi_lock &rq->__lock irq_context: 0 register_mutex#3 sound_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 register_mutex#3 sound_mutex &x->wait#11 irq_context: 0 register_mutex#3 sound_mutex &rq->__lock irq_context: 0 register_mutex#3 sound_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 register_mutex#3 sound_mutex &cfs_rq->removed.lock irq_context: 0 register_mutex#3 sound_mutex &obj_hash[i].lock irq_context: 0 register_mutex#3 sound_mutex uevent_sock_mutex irq_context: 0 register_mutex#3 sound_mutex rcu_read_lock &pool->lock irq_context: 0 register_mutex#3 sound_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 register_mutex#3 sound_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 register_mutex#3 sound_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 register_mutex#3 sound_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 register_mutex#3 sound_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 register_mutex#3 sound_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 register_mutex#3 sound_mutex running_helpers_waitq.lock irq_context: 0 register_mutex#3 sound_mutex subsys mutex#71 irq_context: 0 register_mutex#3 sound_mutex subsys mutex#71 &k->k_lock irq_context: 0 register_mutex#3 clients_lock irq_context: 0 &client->ports_mutex irq_context: 0 &client->ports_mutex &client->ports_lock irq_context: 0 
register_mutex#4 irq_context: 0 register_mutex#4 fs_reclaim irq_context: 0 register_mutex#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 register_mutex#4 pool_lock#2 irq_context: 0 register_mutex#4 sound_oss_mutex irq_context: 0 register_mutex#4 sound_oss_mutex fs_reclaim irq_context: 0 register_mutex#4 sound_oss_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 register_mutex#4 sound_oss_mutex pool_lock#2 irq_context: 0 register_mutex#4 sound_oss_mutex sound_loader_lock irq_context: 0 register_mutex#4 sound_oss_mutex &x->wait#9 irq_context: 0 register_mutex#4 sound_oss_mutex &obj_hash[i].lock irq_context: 0 register_mutex#4 sound_oss_mutex &k->list_lock irq_context: 0 register_mutex#4 sound_oss_mutex gdp_mutex irq_context: 0 register_mutex#4 sound_oss_mutex gdp_mutex &k->list_lock irq_context: 0 register_mutex#4 sound_oss_mutex lock irq_context: 0 register_mutex#4 sound_oss_mutex lock kernfs_idr_lock irq_context: 0 register_mutex#4 sound_oss_mutex &root->kernfs_rwsem irq_context: 0 register_mutex#4 sound_oss_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 register_mutex#4 sound_oss_mutex bus_type_sem irq_context: 0 register_mutex#4 sound_oss_mutex sysfs_symlink_target_lock irq_context: 0 register_mutex#4 sound_oss_mutex &root->kernfs_rwsem irq_context: 0 register_mutex#4 sound_oss_mutex &c->lock irq_context: 0 register_mutex#4 sound_oss_mutex &____s->seqcount irq_context: 0 register_mutex#4 sound_oss_mutex &dev->power.lock irq_context: 0 register_mutex#4 sound_oss_mutex dpm_list_mtx irq_context: 0 register_mutex#4 sound_oss_mutex req_lock irq_context: 0 register_mutex#4 sound_oss_mutex &p->pi_lock irq_context: 0 register_mutex#4 sound_oss_mutex &p->pi_lock &rq->__lock irq_context: 0 register_mutex#4 sound_oss_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 register_mutex#4 sound_oss_mutex &x->wait#11 irq_context: 0 register_mutex#4 sound_oss_mutex &rq->__lock irq_context: 0 register_mutex#4 sound_oss_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 register_mutex#4 sound_oss_mutex uevent_sock_mutex irq_context: 0 register_mutex#4 sound_oss_mutex rcu_read_lock &pool->lock irq_context: 0 register_mutex#4 sound_oss_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 register_mutex#4 sound_oss_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 register_mutex#4 sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 register_mutex#4 sound_oss_mutex running_helpers_waitq.lock irq_context: 0 register_mutex#4 sound_oss_mutex subsys mutex#71 irq_context: 0 register_mutex#4 sound_oss_mutex subsys mutex#71 &k->k_lock irq_context: 0 clients_lock irq_context: 0 &client->ports_lock irq_context: 0 &grp->list_mutex/1 irq_context: 0 &grp->list_mutex#2 irq_context: 0 &grp->list_mutex#2 &grp->list_lock irq_context: 0 &grp->list_mutex/1 clients_lock irq_context: 0 &grp->list_mutex/1 &client->ports_lock irq_context: 0 (wq_completion)events async_lookup_work irq_context: 0 (wq_completion)events async_lookup_work fs_reclaim irq_context: 0 (wq_completion)events async_lookup_work fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events async_lookup_work pool_lock#2 irq_context: 0 (wq_completion)events async_lookup_work clients_lock irq_context: 0 (wq_completion)events async_lookup_work &client->ports_lock irq_context: 0 (wq_completion)events async_lookup_work snd_card_mutex irq_context: 0 (wq_completion)events async_lookup_work 
(kmod_concurrent_max).lock irq_context: 0 (wq_completion)events async_lookup_work &obj_hash[i].lock irq_context: 0 (wq_completion)events async_lookup_work rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events async_lookup_work rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events async_lookup_work rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events async_lookup_work rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events async_lookup_work &x->wait#17 irq_context: 0 (wq_completion)events async_lookup_work sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events async_lookup_work &rq->__lock irq_context: 0 (wq_completion)events async_lookup_work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &grp->list_mutex/1 register_lock#3 irq_context: 0 &grp->list_mutex/1 fs_reclaim irq_context: 0 &grp->list_mutex/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &grp->list_mutex/1 &____s->seqcount irq_context: 0 &grp->list_mutex/1 pool_lock#2 irq_context: 0 &grp->list_mutex/1 &c->lock irq_context: 0 (wq_completion)events async_lookup_work running_helpers_waitq.lock irq_context: 0 (wq_completion)events async_lookup_work autoload_work irq_context: 0 (wq_completion)events async_lookup_work &x->wait#10 irq_context: 0 (wq_completion)events async_lookup_work sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events async_lookup_work sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events async_lookup_work sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex snd_card_mutex irq_context: 0 &dev->mutex &entry->access irq_context: 0 &dev->mutex info_mutex irq_context: 0 &dev->mutex info_mutex proc_subdir_lock irq_context: 0 &dev->mutex info_mutex fs_reclaim irq_context: 0 &dev->mutex info_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex info_mutex &c->lock irq_context: 0 &dev->mutex info_mutex &____s->seqcount irq_context: 0 &dev->mutex info_mutex pool_lock#2 irq_context: 0 &dev->mutex info_mutex proc_inum_ida.xa_lock irq_context: 0 &dev->mutex info_mutex proc_subdir_lock irq_context: 0 &dev->mutex &card->controls_rwsem irq_context: 0 &dev->mutex &card->controls_rwsem &xa->xa_lock#14 irq_context: 0 &dev->mutex &card->controls_rwsem &xa->xa_lock#14 pool_lock#2 irq_context: 0 &dev->mutex &card->controls_rwsem fs_reclaim irq_context: 0 &dev->mutex &card->controls_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &card->controls_rwsem &xa->xa_lock#14 &c->lock irq_context: 0 &dev->mutex &card->controls_rwsem &xa->xa_lock#14 &____s->seqcount irq_context: 0 &dev->mutex &card->controls_rwsem &card->ctl_files_rwlock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem irq_context: 0 &dev->mutex &card->controls_rwsem &xa->xa_lock#14 &pcp->lock &zone->lock irq_context: 0 &dev->mutex &card->controls_rwsem &xa->xa_lock#14 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex subsys mutex#71 irq_context: 0 &dev->mutex subsys mutex#71 &k->k_lock irq_context: 0 &dev->mutex register_mutex#2 irq_context: 0 &dev->mutex register_mutex#2 fs_reclaim irq_context: 0 &dev->mutex register_mutex#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex register_mutex#2 pool_lock#2 irq_context: 0 &dev->mutex register_mutex#2 sound_mutex irq_context: 0 
&dev->mutex register_mutex#2 sound_mutex fs_reclaim irq_context: 0 &dev->mutex register_mutex#2 sound_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex register_mutex#2 sound_mutex pool_lock#2 irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &k->list_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex register_mutex#2 sound_mutex bus_type_sem irq_context: 0 &dev->mutex register_mutex#2 sound_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &c->lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &____s->seqcount irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &dev->power.lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex dpm_list_mtx irq_context: 0 &dev->mutex register_mutex#2 sound_mutex req_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &p->pi_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &rq->__lock irq_context: 0 (wq_completion)events autoload_work irq_context: 0 (wq_completion)events autoload_work &k->list_lock irq_context: 0 (wq_completion)events autoload_work &k->k_lock irq_context: 0 (wq_completion)events (work_completion)(&barr->work) irq_context: 0 (wq_completion)events (work_completion)(&barr->work) &x->wait#10 irq_context: 0 (wq_completion)events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &x->wait#11 irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex uevent_sock_mutex irq_context: 0 &dev->mutex register_mutex#2 sound_mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex register_mutex#2 sound_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &k->k_lock irq_context: 0 &dev->mutex register_mutex#2 sound_mutex subsys mutex#71 irq_context: 0 &dev->mutex register_mutex#2 sound_mutex subsys mutex#71 &k->k_lock irq_context: 0 &dev->mutex register_mutex#2 &obj_hash[i].lock irq_context: 0 &dev->mutex register_mutex#2 register_mutex irq_context: 0 &dev->mutex register_mutex#2 sound_mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex register_mutex#2 &c->lock irq_context: 0 &dev->mutex register_mutex#2 &____s->seqcount irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex fs_reclaim irq_context: 0 &dev->mutex 
register_mutex#2 sound_oss_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex pool_lock#2 irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex sound_loader_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &x->wait#9 irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &k->list_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex bus_type_sem irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &dev->power.lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex dpm_list_mtx irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex req_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &p->pi_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &rq->__lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &x->wait#11 irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &c->lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &____s->seqcount irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex uevent_sock_mutex irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &k->k_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex subsys mutex#71 irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex subsys mutex#71 &k->k_lock irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex register_mutex#2 sound_oss_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_node_0 irq_context: 0 &dev->mutex register_mutex#2 strings irq_context: 0 &dev->mutex register_mutex#2 strings fs_reclaim irq_context: 0 &dev->mutex register_mutex#2 strings fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex register_mutex#2 strings pool_lock#2 irq_context: 0 &dev->mutex register_mutex#2 &entry->access irq_context: 0 &dev->mutex register_mutex#2 info_mutex irq_context: 0 &dev->mutex sound_mutex irq_context: 0 
&dev->mutex sound_mutex fs_reclaim irq_context: 0 &dev->mutex sound_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex sound_mutex pool_lock#2 irq_context: 0 &dev->mutex sound_mutex &rq->__lock irq_context: 0 &dev->mutex sound_mutex &cfs_rq->removed.lock irq_context: 0 &dev->mutex sound_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex sound_mutex &k->list_lock irq_context: 0 &dev->mutex sound_mutex lock irq_context: 0 &dev->mutex sound_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex sound_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex sound_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex sound_mutex bus_type_sem irq_context: 0 &dev->mutex sound_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex sound_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex sound_mutex &c->lock irq_context: 0 &dev->mutex sound_mutex &____s->seqcount irq_context: 0 &dev->mutex sound_mutex &dev->power.lock irq_context: 0 &dev->mutex sound_mutex dpm_list_mtx irq_context: 0 &dev->mutex sound_mutex req_lock irq_context: 0 &dev->mutex sound_mutex &p->pi_lock irq_context: 0 &dev->mutex sound_mutex &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex sound_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex sound_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex sound_mutex &x->wait#11 irq_context: 0 &dev->mutex sound_mutex uevent_sock_mutex irq_context: 0 &dev->mutex sound_mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex sound_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex sound_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex sound_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex sound_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex sound_mutex &k->k_lock irq_context: 0 &dev->mutex sound_mutex subsys mutex#71 irq_context: 0 &dev->mutex sound_mutex subsys mutex#71 &k->k_lock irq_context: 0 &dev->mutex &card->controls_rwsem irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem snd_ctl_led_mutex irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem fs_reclaim irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem pool_lock#2 irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &x->wait#9 irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &obj_hash[i].lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &k->list_lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem lock kernfs_idr_lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem bus_type_sem irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &root->kernfs_rwsem irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &c->lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &____s->seqcount irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &dev->power.lock 
irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem dpm_list_mtx irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &k->k_lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem sysfs_symlink_target_lock irq_context: 0 &dev->mutex sound_oss_mutex irq_context: 0 &dev->mutex sound_oss_mutex fs_reclaim irq_context: 0 &dev->mutex sound_oss_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex sound_oss_mutex pool_lock#2 irq_context: 0 &dev->mutex sound_oss_mutex sound_loader_lock irq_context: 0 &dev->mutex sound_oss_mutex &x->wait#9 irq_context: 0 &dev->mutex sound_oss_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex sound_oss_mutex &k->list_lock irq_context: 0 &dev->mutex sound_oss_mutex lock irq_context: 0 &dev->mutex sound_oss_mutex lock kernfs_idr_lock irq_context: 0 &dev->mutex sound_oss_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex sound_oss_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &dev->mutex sound_oss_mutex bus_type_sem irq_context: 0 &dev->mutex sound_oss_mutex sysfs_symlink_target_lock irq_context: 0 &dev->mutex sound_oss_mutex &root->kernfs_rwsem irq_context: 0 &dev->mutex sound_oss_mutex &dev->power.lock irq_context: 0 &dev->mutex sound_oss_mutex dpm_list_mtx irq_context: 0 &dev->mutex sound_oss_mutex req_lock irq_context: 0 &dev->mutex sound_oss_mutex &p->pi_lock irq_context: 0 &dev->mutex sound_oss_mutex &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex sound_oss_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex sound_oss_mutex &rq->__lock irq_context: 0 &dev->mutex sound_oss_mutex &x->wait#11 irq_context: 0 &dev->mutex sound_oss_mutex uevent_sock_mutex irq_context: 0 &dev->mutex sound_oss_mutex rcu_read_lock &pool->lock irq_context: 0 &dev->mutex sound_oss_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &dev->mutex sound_oss_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &dev->mutex sound_oss_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex sound_oss_mutex running_helpers_waitq.lock irq_context: 0 &dev->mutex sound_oss_mutex &k->k_lock irq_context: 0 &dev->mutex sound_oss_mutex subsys mutex#71 irq_context: 0 &dev->mutex sound_oss_mutex subsys mutex#71 &k->k_lock irq_context: 0 &dev->mutex strings irq_context: 0 &dev->mutex strings fs_reclaim irq_context: 0 &dev->mutex strings fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex strings pool_lock#2 irq_context: 0 &dev->mutex &card->controls_rwsem fs_reclaim irq_context: 0 &dev->mutex &card->controls_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &card->controls_rwsem pool_lock#2 irq_context: 0 &dev->mutex &card->controls_rwsem &c->lock irq_context: 0 &dev->mutex &card->controls_rwsem &____s->seqcount irq_context: 0 &dev->mutex register_mutex#2 &pcp->lock &zone->lock irq_context: 0 &dev->mutex register_mutex#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex sound_mutex lock kernfs_idr_lock &c->lock irq_context: 0 &dev->mutex sound_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 &dev->mutex sound_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &pcp->lock &zone->lock irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex sound_oss_mutex &c->lock irq_context: 0 &dev->mutex sound_oss_mutex &____s->seqcount 
irq_context: 0 &dev->mutex register_mutex#5 irq_context: 0 &dev->mutex register_mutex#3 irq_context: 0 &dev->mutex register_mutex#3 fs_reclaim irq_context: 0 &dev->mutex register_mutex#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex register_mutex#3 pool_lock#2 irq_context: 0 &dev->mutex register_mutex#3 clients_lock irq_context: 0 &dev->mutex clients_lock irq_context: 0 &dev->mutex &client->ports_lock irq_context: 0 &dev->mutex &grp->list_mutex/1 irq_context: 0 &dev->mutex &grp->list_mutex/1 clients_lock irq_context: 0 &dev->mutex &grp->list_mutex/1 &client->ports_lock irq_context: 0 &dev->mutex &client->ports_mutex irq_context: 0 &dev->mutex &client->ports_mutex &client->ports_lock irq_context: 0 &dev->mutex &grp->list_mutex/1 register_lock#3 irq_context: 0 &dev->mutex &grp->list_mutex/1 fs_reclaim irq_context: 0 &dev->mutex &grp->list_mutex/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &grp->list_mutex/1 pool_lock#2 irq_context: 0 &dev->mutex sound_oss_mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex sound_oss_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex strings &c->lock irq_context: 0 &dev->mutex strings &____s->seqcount irq_context: 0 &dev->mutex sound_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &card->controls_rwsem snd_ctl_layer_rwsem &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex client_mutex irq_context: 0 &dev->mutex client_mutex fs_reclaim irq_context: 0 &dev->mutex client_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex client_mutex pool_lock#2 irq_context: 0 &dev->mutex client_mutex &dev->devres_lock irq_context: 0 cgroup_threadgroup_rwsem &rq->__lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex failover_lock irq_context: 0 llc_sap_list_lock irq_context: 0 llc_sap_list_lock pool_lock#2 irq_context: 0 act_id_mutex irq_context: 0 act_id_mutex fs_reclaim irq_context: 0 act_id_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 act_id_mutex pool_lock#2 irq_context: 0 act_mod_lock irq_context: 0 ife_mod_lock irq_context: 0 rtnl_mutex &cfs_rq->removed.lock irq_context: 0 cls_mod_lock irq_context: 0 ematch_mod_lock irq_context: 0 sock_diag_table_mutex irq_context: 0 nfnl_subsys_acct irq_context: 0 nfnl_subsys_queue irq_context: 0 nfnl_subsys_ulog irq_context: 0 nf_log_mutex irq_context: 0 nfnl_subsys_osf irq_context: 0 nf_sockopt_mutex irq_context: 0 nfnl_subsys_ctnetlink irq_context: 0 nfnl_subsys_ctnetlink_exp irq_context: 0 pernet_ops_rwsem nf_ct_ecache_mutex irq_context: 0 nfnl_subsys_cttimeout irq_context: 0 nfnl_subsys_cthelper irq_context: 0 nf_ct_helper_mutex irq_context: 0 pernet_ops_rwsem nf_log_mutex irq_context: 0 nf_conntrack_expect_lock irq_context: 0 nf_ct_nat_helpers_mutex irq_context: 0 nfnl_subsys_nftables irq_context: 0 nfnl_subsys_nftcompat irq_context: 0 masq_mutex irq_context: 0 masq_mutex pernet_ops_rwsem irq_context: 0 masq_mutex pernet_ops_rwsem rtnl_mutex irq_context: 0 masq_mutex (inetaddr_chain).rwsem irq_context: 0 masq_mutex inet6addr_chain.lock irq_context: 0 &xt[i].mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex &tn->lock irq_context: 0 subsys mutex#72 irq_context: 0 subsys mutex#72 &k->k_lock irq_context: 0 nfnl_subsys_ipset 
irq_context: 0 ip_set_type_mutex irq_context: 0 slab_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem ipvs->est_mutex irq_context: 0 pernet_ops_rwsem ipvs->est_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem ipvs->est_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem &base->lock irq_context: 0 pernet_ops_rwsem &base->lock &obj_hash[i].lock irq_context: 0 ip_vs_sched_mutex irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex pool_lock#2 irq_context: 0 ip_vs_pe_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &cfs_rq->removed.lock irq_context: 0 tunnel4_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_node_0 irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &base->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock rcu_node_0 irq_context: 0 xfrm4_protocol_mutex irq_context: 0 &xt[i].mutex fs_reclaim irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &xt[i].mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem net_generic_ids.xa_lock pool_lock#2 irq_context: 0 &xt[i].mutex &____s->seqcount irq_context: 0 inet_diag_table_mutex irq_context: 0 xfrm_km_lock irq_context: 0 xfrm6_protocol_mutex irq_context: 0 tunnel6_mutex irq_context: 0 xfrm_if_cb_lock irq_context: 0 inetsw6_lock irq_context: 0 &hashinfo->lock#2 irq_context: 0 pernet_ops_rwsem &hashinfo->lock#2 irq_context: 0 pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock irq_context: 0 rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex 
&idev->mc_lock _xmit_ETHER &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (crypto_chain).rwsem irq_context: 0 (crypto_chain).rwsem fs_reclaim irq_context: 0 (crypto_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (crypto_chain).rwsem &c->lock irq_context: 0 (crypto_chain).rwsem &____s->seqcount irq_context: 0 (crypto_chain).rwsem pool_lock#2 irq_context: 0 (crypto_chain).rwsem kthread_create_lock irq_context: 0 (crypto_chain).rwsem &p->pi_lock irq_context: 0 (crypto_chain).rwsem &x->wait irq_context: 0 (crypto_chain).rwsem &rq->__lock irq_context: 0 (crypto_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (crypto_chain).rwsem &obj_hash[i].lock irq_context: 0 (crypto_chain).rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &x->wait#20 irq_context: 0 &x->wait#20 &p->pi_lock irq_context: 0 &p->alloc_lock &x->wait irq_context: 0 pernet_ops_rwsem rtnl_mutex batched_entropy_u32.lock crngs.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 stp_proto_mutex irq_context: 0 stp_proto_mutex llc_sap_list_lock irq_context: 0 stp_proto_mutex llc_sap_list_lock pool_lock#2 irq_context: 0 switchdev_notif_chain.lock irq_context: 0 (switchdev_blocking_notif_chain).rwsem irq_context: 0 br_ioctl_mutex irq_context: 0 nf_ct_proto_mutex irq_context: 0 ebt_mutex irq_context: 0 ebt_mutex fs_reclaim irq_context: 0 ebt_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 ebt_mutex pool_lock#2 irq_context: 0 dsa_tag_drivers_lock irq_context: 0 rtnl_mutex &tn->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim &rq->__lock irq_context: 0 protocol_list_lock irq_context: 0 linkfail_lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex lock kernfs_idr_lock &c->lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 rose_neigh_list_lock irq_context: 0 proto_tab_lock#2 irq_context: 0 bt_proto_lock irq_context: 0 bt_proto_lock pool_lock#2 irq_context: 0 bt_proto_lock &dir->lock irq_context: 0 bt_proto_lock &obj_hash[i].lock irq_context: 0 bt_proto_lock chan_list_lock irq_context: 0 bt_proto_lock l2cap_sk_list.lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_L2CAP irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_L2CAP slock-AF_BLUETOOTH-BTPROTO_L2CAP irq_context: 0 
sk_lock-AF_BLUETOOTH-BTPROTO_L2CAP chan_list_lock irq_context: 0 slock-AF_BLUETOOTH-BTPROTO_L2CAP irq_context: 0 rfcomm_wq.lock irq_context: 0 rfcomm_mutex irq_context: 0 auth_domain_lock irq_context: 0 registered_mechs_lock irq_context: 0 (crypto_chain).rwsem &p->pi_lock &rq->__lock irq_context: 0 (crypto_chain).rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 atm_dev_notify_chain.lock irq_context: 0 genl_mutex irq_context: 0 proto_tab_lock#3 irq_context: 0 vlan_ioctl_mutex irq_context: 0 pernet_ops_rwsem (console_sem).lock irq_context: 0 pernet_ops_rwsem console_lock console_srcu console_owner_lock irq_context: 0 pernet_ops_rwsem console_lock console_srcu console_owner irq_context: 0 pernet_ops_rwsem console_lock console_srcu console_owner &port_lock_key irq_context: 0 pernet_ops_rwsem console_lock console_srcu console_owner console_owner_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock rcu_read_lock &ndev->lock irq_context: 0 rds_info_lock irq_context: 0 rds_trans_sem irq_context: 0 rds_trans_sem (console_sem).lock irq_context: 0 rds_trans_sem console_lock console_srcu console_owner_lock irq_context: 0 rds_trans_sem console_lock console_srcu console_owner irq_context: 0 rds_trans_sem console_lock console_srcu console_owner &port_lock_key irq_context: 0 rds_trans_sem console_lock console_srcu console_owner console_owner_lock irq_context: 0 &id_priv->lock irq_context: 0 lock#7 irq_context: 0 lock#7 fs_reclaim irq_context: 0 lock#7 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 lock#7 pool_lock#2 irq_context: 0 lock#7 &xa->xa_lock#15 irq_context: 0 lock#7 &xa->xa_lock#15 pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &h->lhash2[i].lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 
pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem wq_pool_mutex irq_context: 0 pernet_ops_rwsem wq_pool_mutex &wq->mutex irq_context: 0 pernet_ops_rwsem pcpu_lock irq_context: 0 pernet_ops_rwsem &list->lock#4 irq_context: 0 pernet_ops_rwsem &dir->lock#2 irq_context: 0 pernet_ops_rwsem ptype_lock irq_context: 0 pernet_ops_rwsem batched_entropy_u32.lock crngs.lock irq_context: 0 pernet_ops_rwsem rcu_read_lock rhashtable_bucket irq_context: 0 pernet_ops_rwsem k-clock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC k-slock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-slock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &pnettable->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex smc_ib_devices.mutex irq_context: 0 smc_wr_rx_hash_lock irq_context: 0 v9fs_trans_lock irq_context: 0 pernet_ops_rwsem &this->receive_lock irq_context: 0 &x->wait#17 &p->pi_lock irq_context: 0 &x->wait#17 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 lowpan_nhc_lock irq_context: 0 ovs_mutex irq_context: 0 pernet_ops_rwsem once_lock irq_context: 0 pernet_ops_rwsem once_lock crngs.lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex 
fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &c->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &____s->seqcount irq_context: 0 &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 kernfs_idr_lock &obj_hash[i].lock irq_context: 0 kernfs_idr_lock pool_lock#2 irq_context: 0 kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events drain_vmap_work irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock purge_vmap_area_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock pool_lock#2 irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rq->__lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock pool_lock#2 irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock quarantine_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock pool_lock#2 irq_context: 0 &root->kernfs_rwsem rcu_node_0 irq_context: 0 &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 &root->kernfs_rwsem &rq->__lock 
irq_context: 0 &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 &root->kernfs_rwsem &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 &root->kernfs_rwsem &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem &root->kernfs_iattr_rwsem pool_lock#2 irq_context: 0 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 rcu_read_lock &rcu_state.gp_wq irq_context: 0 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback put_task_map-wait-type-override fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_callback put_task_map-wait-type-override fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (debug_obj_work).work pool_lock#2 irq_context: 0 tasklist_lock &base->lock irq_context: 0 tasklist_lock &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key batched_entropy_u8.lock irq_context: 0 &sb->s_type->i_mutex_key kfence_freelist_lock irq_context: 0 &root->kernfs_rwsem quarantine_lock irq_context: 0 &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &root->kernfs_rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &root->kernfs_rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &root->kernfs_rwsem rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim pool_lock#2 irq_context: 0 key_types_sem irq_context: 0 key_types_sem asymmetric_key_parsers_sem irq_context: 0 key_types_sem asymmetric_key_parsers_sem fs_reclaim irq_context: 0 key_types_sem asymmetric_key_parsers_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 key_types_sem asymmetric_key_parsers_sem pool_lock#2 irq_context: 0 key_types_sem asymmetric_key_parsers_sem crypto_alg_sem irq_context: 0 key_types_sem asymmetric_key_parsers_sem &obj_hash[i].lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem crypto_alg_sem irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem fs_reclaim irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem 
fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &c->lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &pcp->lock &zone->lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &____s->seqcount irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem pool_lock#2 irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem kthread_create_lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &p->pi_lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &x->wait irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &rq->__lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &obj_hash[i].lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem (crypto_chain).rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem &x->wait#20 irq_context: 0 key_types_sem asymmetric_key_parsers_sem &base->lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem &base->lock &obj_hash[i].lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem &rq->__lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &x->wait#20 &p->pi_lock &rq->__lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem (&timer.timer) irq_context: 0 key_types_sem asymmetric_key_parsers_sem &c->lock irq_context: 0 key_types_sem asymmetric_key_parsers_sem &____s->seqcount irq_context: 0 key_types_sem &type->lock_class irq_context: 0 key_types_sem &type->lock_class fs_reclaim irq_context: 0 key_types_sem &type->lock_class fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 key_types_sem &type->lock_class pool_lock#2 irq_context: 0 key_types_sem &type->lock_class key_user_lock irq_context: 0 key_types_sem &type->lock_class &____s->seqcount irq_context: 0 key_types_sem &type->lock_class &c->lock irq_context: 0 key_types_sem &type->lock_class crngs.lock irq_context: 0 key_types_sem &type->lock_class key_serial_lock irq_context: 0 key_types_sem &type->lock_class key_construction_mutex irq_context: 0 key_types_sem &type->lock_class key_construction_mutex &obj_hash[i].lock irq_context: 0 key_types_sem &type->lock_class key_construction_mutex pool_lock#2 irq_context: 0 key_types_sem &type->lock_class ima_keys_lock irq_context: 0 key_types_sem &obj_hash[i].lock irq_context: 0 key_types_sem pool_lock#2 irq_context: 0 slab_mutex lock irq_context: 0 slab_mutex lock kernfs_idr_lock irq_context: 0 slab_mutex &root->kernfs_rwsem irq_context: 0 slab_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 slab_mutex &k->list_lock irq_context: 0 slab_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 slab_mutex lock kernfs_idr_lock &c->lock irq_context: 0 slab_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 slab_mutex &obj_hash[i].lock irq_context: 0 slab_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 slab_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 slab_mutex 
&root->kernfs_rwsem kernfs_idr_lock irq_context: 0 slab_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 slab_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 slab_mutex lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 slab_mutex lock kernfs_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex crypto_alg_sem irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock fs_reclaim irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock pool_lock#2 irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock free_vmap_area_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock vmap_area_lock irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock &____s->seqcount irq_context: 0 cpu_hotplug_lock cpuhp_state_mutex scomp_lock init_mm.page_table_lock irq_context: 0 lock pidmap_lock &c->lock irq_context: 0 lock pidmap_lock &____s->seqcount irq_context: 0 &mm->page_table_lock irq_context: 0 ptlock_ptr(ptdesc)#2 irq_context: 0 rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC k-slock-AF_RXRPC irq_context: 0 pernet_ops_rwsem k-slock-AF_RXRPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex crngs.lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &sb->s_type->i_lock_key#8 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &dir->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 &table->hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 &table->hash[i].lock k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 
rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex kthread_create_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &x->wait irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events netstamp_work irq_context: 0 (wq_completion)events netstamp_work cpu_hotplug_lock irq_context: 0 (wq_completion)events netstamp_work cpu_hotplug_lock jump_label_mutex irq_context: 0 (wq_completion)events netstamp_work cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &x->wait#21 irq_context: 0 &x->wait#21 irq_context: 0 &x->wait#21 &p->pi_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &local->services_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC fs_reclaim irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC pool_lock#2 irq_context: 0 pernet_ops_rwsem &rxnet->conn_lock irq_context: 0 pernet_ops_rwsem &call->waitq irq_context: 0 pernet_ops_rwsem &rx->call_lock irq_context: 0 pernet_ops_rwsem &rxnet->call_lock irq_context: 0 bio_slab_lock slab_mutex &root->kernfs_rwsem irq_context: 0 bio_slab_lock slab_mutex &k->list_lock irq_context: 0 bio_slab_lock slab_mutex lock irq_context: 0 bio_slab_lock slab_mutex lock kernfs_idr_lock irq_context: 0 bio_slab_lock slab_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 bio_slab_lock slab_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 init_user_ns.keyring_sem irq_context: 0 init_user_ns.keyring_sem key_user_lock irq_context: 0 init_user_ns.keyring_sem root_key_user.lock irq_context: 0 init_user_ns.keyring_sem fs_reclaim irq_context: 0 init_user_ns.keyring_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 init_user_ns.keyring_sem pool_lock#2 irq_context: 0 init_user_ns.keyring_sem crngs.lock irq_context: 0 init_user_ns.keyring_sem key_serial_lock irq_context: 0 init_user_ns.keyring_sem key_construction_mutex irq_context: 0 init_user_ns.keyring_sem &type->lock_class irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock fs_reclaim irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock pool_lock#2 irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock &c->lock 
irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock &____s->seqcount irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock root_key_user.lock irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock key_construction_mutex irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock key_construction_mutex keyring_name_lock irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock key_construction_mutex &obj_hash[i].lock irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock key_construction_mutex pool_lock#2 irq_context: 0 init_user_ns.keyring_sem keyring_serialise_link_lock irq_context: 0 init_user_ns.keyring_sem key_construction_mutex keyring_name_lock irq_context: 0 init_user_ns.keyring_sem &type->lock_class keyring_serialise_link_lock &obj_hash[i].lock irq_context: 0 template_list irq_context: 0 idr_lock irq_context: softirq (&rxnet->peer_keepalive_timer) irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ima_extend_list_mutex irq_context: 0 ima_extend_list_mutex fs_reclaim irq_context: 0 ima_extend_list_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 ima_extend_list_mutex pool_lock#2 irq_context: 0 (wq_completion)krxrpcd irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) &rxnet->peer_hash_lock irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) &obj_hash[i].lock irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) &base->lock irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) &base->lock &obj_hash[i].lock irq_context: 0 pci_bus_sem irq_context: 0 clk_debug_lock pin_fs_lock irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 clk_debug_lock 
&sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 clk_debug_lock &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 rcu_read_lock &pool->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound deferred_probe_work irq_context: 0 (wq_completion)events_unbound deferred_probe_work deferred_probe_mutex irq_context: 0 deferred_probe_work irq_context: 0 console_mutex &root->kernfs_rwsem irq_context: 0 console_mutex kernfs_notify_lock irq_context: 0 console_mutex kernfs_notify_lock rcu_read_lock &pool->lock irq_context: 0 console_mutex kernfs_notify_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 console_mutex kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 console_mutex kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 console_mutex kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 console_mutex kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 console_mutex &rq->__lock irq_context: 0 console_mutex (console_sem).lock irq_context: 0 console_mutex console_lock console_srcu console_owner_lock irq_context: 0 console_mutex console_lock console_srcu console_owner irq_context: 0 console_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 console_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: softirq &(&group->avgs_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 k-sk_lock-AF_INET irq_context: 0 k-sk_lock-AF_INET k-slock-AF_INET#2 irq_context: 0 k-sk_lock-AF_INET &table->hash[i].lock irq_context: 0 k-sk_lock-AF_INET &table->hash[i].lock k-clock-AF_INET irq_context: 0 k-sk_lock-AF_INET &table->hash[i].lock &table->hash2[i].lock irq_context: 0 k-slock-AF_INET#2 irq_context: 0 k-sk_lock-AF_INET6 irq_context: 0 k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 k-slock-AF_INET6 irq_context: 0 k-sk_lock-AF_INET6 &table->hash[i].lock irq_context: 0 k-sk_lock-AF_INET6 &table->hash[i].lock k-clock-AF_INET6 irq_context: 0 k-sk_lock-AF_INET6 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 power_off_handler_list.lock irq_context: 0 reg_requests_lock irq_context: 0 (wq_completion)events reg_work irq_context: 0 (wq_completion)events reg_work rtnl_mutex irq_context: 0 (wq_completion)events reg_work rtnl_mutex reg_requests_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex fs_reclaim irq_context: 0 (wq_completion)events reg_work rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events reg_work rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)events reg_work rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events reg_work rtnl_mutex reg_pending_beacons_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &rdev->wiphy.mtx irq_context: 0 (wq_completion)events reg_work rtnl_mutex &rdev->wiphy.mtx reg_requests_lock irq_context: 0 
(wq_completion)events reg_work rtnl_mutex &base->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &fw_cache.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &fw_cache.lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) async_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) init_task.alloc_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) init_task.alloc_lock init_fs.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) init_task.alloc_lock init_fs.lock &dentry->d_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rcu_read_lock &____s->seqcount#6 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key &dentry->d_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &sb->s_type->i_mutex_key &dentry->d_lock &wq irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &dentry->d_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) (console_sem).lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 detector_work irq_context: 0 &wq->mutex &x->wait#10 irq_context: 0 rcu_read_lock &pool->lock (worker)->lock irq_context: 0 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock irq_context: 0 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events 
(work_completion)(&fw_work->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 acpi_gpio_deferred_req_irqs_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) console_owner_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) console_owner irq_context: 0 gpd_list_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem usermodehelper_disabled_waitq.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &x->wait#9 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &k->list_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex &k->list_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex &c->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem lock kernfs_idr_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &root->kernfs_rwsem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem bus_type_sem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem sysfs_symlink_target_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &root->kernfs_rwsem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem 
&dev->power.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem dpm_list_mtx irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &k->k_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem subsys mutex#73 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem subsys mutex#73 &k->k_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem fw_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &c->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem running_helpers_waitq.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &x->wait#22 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &base->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 rcu_state.exp_mutex pool_lock#2 irq_context: 0 rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_lock_key#2 irq_context: 0 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 tomoyo_ss &obj_hash[i].lock irq_context: 0 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 tomoyo_ss tomoyo_log_lock irq_context: 0 tomoyo_ss tomoyo_log_wait.lock irq_context: 0 tomoyo_ss &c->lock irq_context: 0 tomoyo_ss &____s->seqcount irq_context: 0 cdev_lock irq_context: 0 tty_mutex (console_sem).lock irq_context: 0 tty_mutex console_lock irq_context: 0 tty_mutex fs_reclaim irq_context: 0 tty_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 tty_mutex &c->lock irq_context: 0 tty_mutex &pcp->lock &zone->lock irq_context: 0 tty_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 tty_mutex &____s->seqcount irq_context: 0 tty_mutex pool_lock#2 irq_context: 0 tty_mutex tty_ldiscs_lock irq_context: 0 tty_mutex &obj_hash[i].lock irq_context: 0 tty_mutex &k->list_lock irq_context: 0 tty_mutex &k->k_lock irq_context: 0 tty_mutex 
&tty->legacy_mutex irq_context: 0 tty_mutex &tty->legacy_mutex &tty->read_wait irq_context: 0 tty_mutex &tty->legacy_mutex &tty->write_wait irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem fs_reclaim irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &c->lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &____s->seqcount irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem pool_lock#2 irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem free_vmap_area_lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem vmap_area_lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem init_mm.page_table_lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &tty->write_wait irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &tty->read_wait irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &tty->termios_rwsem irq_context: 0 &tty->legacy_mutex irq_context: 0 &tty->legacy_mutex &tty->files_lock irq_context: 0 &tty->legacy_mutex &port->lock irq_context: 0 &tty->legacy_mutex &port->mutex irq_context: 0 &tty->legacy_mutex &port->mutex fs_reclaim irq_context: 0 &tty->legacy_mutex &port->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &tty->legacy_mutex &port->mutex &____s->seqcount irq_context: 0 &tty->legacy_mutex &port->mutex &port_lock_key irq_context: 0 &tty->legacy_mutex &port->mutex pool_lock#2 irq_context: 0 &tty->legacy_mutex &port->mutex &desc->request_mutex irq_context: 0 &tty->legacy_mutex &port->mutex &desc->request_mutex &irq_desc_lock_class irq_context: 0 &tty->legacy_mutex &port->mutex &desc->request_mutex &irq_desc_lock_class irq_controller_lock irq_context: 0 &tty->legacy_mutex &port->mutex &desc->request_mutex &irq_desc_lock_class mask_lock irq_context: 0 &tty->legacy_mutex &port->mutex &desc->request_mutex &irq_desc_lock_class mask_lock tmp_mask_lock irq_context: 0 &tty->legacy_mutex &port->mutex register_lock irq_context: 0 &tty->legacy_mutex &port->mutex register_lock proc_subdir_lock irq_context: 0 &tty->legacy_mutex &port->mutex register_lock fs_reclaim irq_context: 0 &tty->legacy_mutex &port->mutex register_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &tty->legacy_mutex &port->mutex register_lock pool_lock#2 irq_context: 0 &tty->legacy_mutex &port->mutex register_lock proc_inum_ida.xa_lock irq_context: 0 &tty->legacy_mutex &port->mutex register_lock proc_subdir_lock irq_context: 0 &tty->legacy_mutex &port->mutex register_lock &c->lock irq_context: 0 &tty->legacy_mutex &port->mutex register_lock &____s->seqcount irq_context: 0 &tty->legacy_mutex &port->mutex &irq_desc_lock_class irq_context: 0 &tty->legacy_mutex &port->mutex proc_subdir_lock irq_context: 0 &tty->legacy_mutex &port->mutex proc_inum_ida.xa_lock irq_context: 0 &tty->legacy_mutex &port->mutex proc_subdir_lock irq_context: 0 &tty->legacy_mutex &port_lock_key irq_context: 0 sb_writers#2 irq_context: 0 sb_writers#2 mount_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 rename_lock.seqcount irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 fs_reclaim irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 pool_lock#2 irq_context: 0 sb_writers#2 
&sb->s_type->i_mutex_key/1 &dentry->d_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &dentry->d_lock &obj_hash[i].lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &dentry->d_lock pool_lock#2 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &obj_hash[i].lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_log_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_log_wait.lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss &c->lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_lock_key#2 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &s->s_inode_list_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 tk_core.seq.seqcount irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_lock_key#2 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key &dentry->d_lock &wq#2 irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_mutex_key irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_mutex_key tk_core.seq.seqcount irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_mutex_key rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#2 &sb->s_type->i_mutex_key/1 &sb->s_type->i_mutex_key &dentry->d_lock irq_context: 0 tomoyo_ss file_systems_lock irq_context: 0 tomoyo_ss fs_reclaim irq_context: 0 tomoyo_ss fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 tomoyo_ss rcu_read_lock init_fs.seq.seqcount irq_context: 0 tomoyo_ss tomoyo_policy_lock &____s->seqcount irq_context: 0 &type->s_umount_key#24/1 irq_context: 0 &type->s_umount_key#24/1 fs_reclaim irq_context: 0 &type->s_umount_key#24/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#24/1 pool_lock#2 irq_context: 0 &type->s_umount_key#24/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#24/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#24/1 shrinker_mutex irq_context: 0 &type->s_umount_key#24/1 &c->lock irq_context: 0 &type->s_umount_key#24/1 &____s->seqcount irq_context: 0 &type->s_umount_key#24/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#24/1 sb_lock irq_context: 
0 &type->s_umount_key#24/1 inode_hash_lock irq_context: 0 &type->s_umount_key#24/1 inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 &type->s_umount_key#24/1 bdev_lock irq_context: 0 &type->s_umount_key#24/1 &disk->open_mutex irq_context: 0 &type->s_umount_key#24/1 &disk->open_mutex bdev_lock irq_context: 0 &type->s_umount_key#24/1 &disk->open_mutex bdev_lock &bdev->bd_holder_lock irq_context: 0 &type->s_umount_key#24/1 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#24/1 &wq->mutex irq_context: 0 &type->s_umount_key#24/1 &wq->mutex &pool->lock irq_context: 0 &type->s_umount_key#24/1 kthread_create_lock irq_context: 0 &type->s_umount_key#24/1 &p->pi_lock irq_context: 0 &type->s_umount_key#24/1 &x->wait irq_context: 0 &type->s_umount_key#24/1 &rq->__lock irq_context: 0 &type->s_umount_key#24/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#24/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#24/1 &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#24/1 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#24/1 wq_pool_mutex irq_context: 0 &type->s_umount_key#24/1 wq_pool_mutex &wq->mutex irq_context: 0 &type->s_umount_key#24/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#24/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#24/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#24/1 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &type->s_umount_key#24/1 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#24/1 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &type->s_umount_key#24/1 lock#4 irq_context: 0 &type->s_umount_key#24/1 &mapping->i_private_lock irq_context: 0 &type->s_umount_key#24/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#24/1 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#24/1 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->s_umount_key#24/1 bit_wait_table + i irq_context: hardirq bit_wait_table + i irq_context: hardirq bit_wait_table + i &p->pi_lock irq_context: hardirq bit_wait_table + i &p->pi_lock &rq->__lock irq_context: hardirq bit_wait_table + i &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#26/1 rcu_read_lock &retval->lock irq_context: 0 &type->s_umount_key#26/1 rcu_read_lock &____s->seqcount irq_context: hardirq &retval->lock irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_lock_key#22 irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_es_lock irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &____s->seqcount irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_mutex_key#8 
&ei->i_data_sem &obj_hash[i].lock irq_context: 0 &type->s_umount_key#26/1 &xa->xa_lock#9 &c->lock irq_context: 0 &type->s_umount_key#26/1 &xa->xa_lock#9 &____s->seqcount irq_context: 0 &type->s_umount_key#26/1 proc_subdir_lock irq_context: 0 &type->s_umount_key#26/1 proc_inum_ida.xa_lock irq_context: 0 &type->s_umount_key#26/1 proc_subdir_lock irq_context: 0 &type->s_umount_key#26/1 &journal->j_state_lock irq_context: 0 &type->s_umount_key#26/1 kthread_create_lock irq_context: 0 &type->s_umount_key#26/1 &p->pi_lock irq_context: 0 &type->s_umount_key#26/1 &x->wait irq_context: 0 &journal->j_wait_done_commit irq_context: 0 &type->s_umount_key#26/1 &journal->j_wait_done_commit irq_context: 0 &journal->j_state_lock irq_context: 0 &journal->j_state_lock &journal->j_wait_done_commit irq_context: 0 &journal->j_state_lock &journal->j_wait_commit irq_context: 0 &type->s_umount_key#26/1 &journal->j_state_lock irq_context: 0 &type->s_umount_key#26/1 &p->alloc_lock irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 &type->s_umount_key#26/1 cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 &type->s_umount_key#26/1 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#26/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#26/1 &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#26/1 wq_pool_mutex irq_context: 0 &type->s_umount_key#26/1 wq_pool_mutex &wq->mutex irq_context: 0 &type->s_umount_key#26/1 &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 &type->s_umount_key#26/1 &ei->i_es_lock irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex fs_reclaim irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex &root->kernfs_rwsem irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex &k->list_lock irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex lock irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex lock kernfs_idr_lock irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex 
slab_mutex &c->lock irq_context: 0 &type->s_umount_key#26/1 ext4_grpinfo_slab_create_mutex slab_mutex &____s->seqcount irq_context: 0 &type->s_umount_key#26/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#26/1 ext4_li_mtx irq_context: 0 &type->s_umount_key#26/1 lock irq_context: 0 &type->s_umount_key#26/1 lock kernfs_idr_lock irq_context: 0 &type->s_umount_key#26/1 &root->kernfs_rwsem irq_context: 0 &type->s_umount_key#26/1 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &type->s_umount_key#26/1 lock kernfs_idr_lock pool_lock#2 irq_context: 0 &type->s_umount_key#26/1 (console_sem).lock irq_context: 0 &type->s_umount_key#26/1 console_lock console_srcu console_owner_lock irq_context: 0 &type->s_umount_key#26/1 console_lock console_srcu console_owner irq_context: 0 &type->s_umount_key#26/1 console_lock console_srcu console_owner &port_lock_key irq_context: 0 &type->s_umount_key#26/1 console_lock console_srcu console_owner console_owner_lock irq_context: 0 &type->s_umount_key#26/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_lock_key#22 irq_context: 0 &type->i_mutex_dir_key#3 irq_context: 0 &type->i_mutex_dir_key#3 fs_reclaim irq_context: 0 &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#3 pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#3 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_es_lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#3 &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#3 &xa->xa_lock#9 irq_context: 0 &type->i_mutex_dir_key#3 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#3 lock#4 irq_context: 0 &type->i_mutex_dir_key#3 &mapping->i_private_lock irq_context: 0 &type->i_mutex_dir_key#3 tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->i_mutex_dir_key#3 bit_wait_table + i irq_context: 0 &type->i_mutex_dir_key#3 &rq->__lock irq_context: 0 &type->i_mutex_dir_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#3 inode_hash_lock irq_context: 0 &type->i_mutex_dir_key#3 &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#3 &obj_hash[i].lock pool_lock irq_context: 0 &type->i_mutex_dir_key#3 inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 &type->i_mutex_dir_key#3 inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->i_mutex_dir_key#3 &journal->j_state_lock irq_context: 0 &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 irq_context: 0 &type->i_mutex_dir_key#3 &ei->xattr_sem irq_context: 0 &type->i_mutex_dir_key#3 &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock 
&dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 &dentry->d_lock &wq irq_context: 0 &type->i_mutex_dir_key#3 irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem fs_reclaim irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem rename_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem rename_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock mount_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock mount_lock.seqcount &new_ns->poll irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock mount_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock mount_lock.seqcount rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock mount_lock.seqcount &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem mount_lock mount_lock.seqcount pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key namespace_sem &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key namespace_sem &obj_hash[i].lock irq_context: 0 tomoyo_ss tomoyo_policy_lock &c->lock irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: 0 rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock irq_context: hardirq rcu_state.barrier_lock &obj_hash[i].lock irq_context: 0 rcu_state.barrier_mutex &x->wait#24 irq_context: 0 rcu_state.barrier_mutex &rq->__lock irq_context: 0 rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) &rsp->gp_wait irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) pcpu_lock irq_context: softirq rcu_callback &x->wait#24 irq_context: softirq rcu_callback &x->wait#24 &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#3 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &type->i_mutex_dir_key#3 lock#4 &lruvec->lru_lock irq_context: 0 &type->i_mutex_dir_key#3 &c->lock irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock &retval->lock 
irq_context: 0 &type->i_mutex_dir_key#3 rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 &dentry->d_lock &wq#2 irq_context: 0 &mm->mmap_lock fs_reclaim irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock &c->lock irq_context: 0 &mm->mmap_lock &____s->seqcount irq_context: 0 &mm->mmap_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock irq_context: 0 &mm->mmap_lock fs_reclaim irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->page_table_lock irq_context: 0 &mm->mmap_lock pool_lock#2 irq_context: 0 &mm->mmap_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock &c->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &mm->page_table_lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &sig->cred_guard_mutex irq_context: 0 &sig->cred_guard_mutex fs_reclaim irq_context: 0 &sig->cred_guard_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex pool_lock#2 irq_context: 0 &sig->cred_guard_mutex init_fs.lock irq_context: 0 &sig->cred_guard_mutex &p->pi_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &xa->xa_lock#9 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock lock#4 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &xa->xa_lock#9 pool_lock#2 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_es_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &____s->seqcount irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem pool_lock#2 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock &nvmeq->sq_lock irq_context: 0 &sig->cred_guard_mutex &folio_wait_table[i] irq_context: 0 &sig->cred_guard_mutex &rq->__lock irq_context: 0 &sig->cred_guard_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss irq_context: 0 &sig->cred_guard_mutex tomoyo_ss 
mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &c->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &____s->seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss pool_lock#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss fs_reclaim irq_context: 0 &sig->cred_guard_mutex tomoyo_ss fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock init_fs.seq.seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_log_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_log_wait.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex binfmt_lock irq_context: 0 &sig->cred_guard_mutex init_binfmt_misc.entries_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock init_fs.seq.seqcount irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 fs_reclaim irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_es_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &mapping->i_private_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 inode_hash_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 inode_hash_lock &s->s_inode_list_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &journal->j_state_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 lock#4 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->xattr_sem irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->xattr_sem lock#4 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 
&sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 &dentry->d_lock &wq irq_context: 0 &sig->cred_guard_mutex &sb->s_type->i_lock_key#22 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 rename_lock.seqcount irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem &____s->seqcount irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &____s->seqcount irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &xa->xa_lock#9 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 bit_wait_table + i irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &rq->__lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&ipvs->defense_work)->timer irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&gc_work->dwork)->timer irq_context: softirq &(&gc_work->dwork)->timer rcu_read_lock &pool->lock irq_context: softirq &(&gc_work->dwork)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&gc_work->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &s->s_inode_list_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)events_long 
(work_completion)(&(&ipvs->defense_work)->work) &ipvs->dropentry_lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &ipvs->droppacket_lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &ipvs->securetcp_lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &base->lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 batched_entropy_u8.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 kfence_freelist_lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &c->lock irq_context: 0 &sig->cred_guard_mutex &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 &dentry->d_lock &wq#2 irq_context: 0 &sig->cred_guard_mutex &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock lock#4 &lruvec->lru_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &c->lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock &retval->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &p->alloc_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &p->alloc_lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &sighand->siglock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &newf->file_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock batched_entropy_u64.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock batched_entropy_u64.lock crngs.lock irq_context: 0 
batched_entropy_u16.lock irq_context: 0 batched_entropy_u16.lock crngs.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem irq_context: 0 &mm->mmap_lock &anon_vma->rwsem irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 rcu_read_lock ptlock_ptr(ptdesc)#2/1 irq_context: 0 &mm->mmap_lock lock#4 irq_context: 0 &mm->mmap_lock lock#4 &lruvec->lru_lock irq_context: 0 &mm->mmap_lock lock#5 irq_context: 0 &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &mm->page_table_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem pool_lock#2 irq_context: 0 mapping.invalidate_lock irq_context: 0 mapping.invalidate_lock mmu_notifier_invalidate_range_start irq_context: 0 mapping.invalidate_lock &____s->seqcount irq_context: 0 mapping.invalidate_lock &xa->xa_lock#9 irq_context: 0 mapping.invalidate_lock lock#4 irq_context: 0 mapping.invalidate_lock lock#4 &lruvec->lru_lock irq_context: 0 mapping.invalidate_lock &ei->i_es_lock irq_context: 0 mapping.invalidate_lock pool_lock#2 irq_context: 0 mapping.invalidate_lock &c->lock irq_context: 0 mapping.invalidate_lock tk_core.seq.seqcount irq_context: 0 mapping.invalidate_lock rcu_read_lock pool_lock#2 irq_context: 0 mapping.invalidate_lock rcu_read_lock &retval->lock irq_context: 0 mapping.invalidate_lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 mapping.invalidate_lock rcu_read_lock &nvmeq->sq_lock irq_context: 0 mapping.invalidate_lock &folio_wait_table[i] irq_context: 0 mapping.invalidate_lock &rq->__lock irq_context: 0 mapping.invalidate_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &obj_hash[i].lock pool_lock irq_context: 0 binfmt_lock irq_context: 0 &fsnotify_mark_srcu irq_context: 0 &xa->xa_lock#9 irq_context: 0 &vma->vm_lock->lock fs_reclaim irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &vma->vm_lock->lock &____s->seqcount irq_context: 0 &vma->vm_lock->lock pool_lock#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: hardirq &rq->__lock pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#3 &dentry->d_lock &wq irq_context: 0 rcu_read_lock &dentry->d_lock &lru->node[i].lock irq_context: 0 &type->i_mutex_dir_key#3 &dentry->d_lock &wq#2 irq_context: 0 mapping.invalidate_lock &xa->xa_lock#9 pool_lock#2 irq_context: 0 mapping.invalidate_lock &ei->i_data_sem irq_context: 0 mapping.invalidate_lock &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 mapping.invalidate_lock &ei->i_data_sem pool_lock#2 irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock 
irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock mapping.invalidate_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &c->lock irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &____s->seqcount irq_context: 0 mapping.invalidate_lock rcu_read_lock &c->lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &____s->seqcount irq_context: 0 tomoyo_ss quarantine_lock irq_context: 0 mapping.invalidate_lock &xa->xa_lock#9 &c->lock irq_context: 0 mapping.invalidate_lock &xa->xa_lock#9 &____s->seqcount irq_context: 0 tomoyo_ss batched_entropy_u8.lock irq_context: 0 tomoyo_ss kfence_freelist_lock irq_context: 0 tomoyo_ss &meta->lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 mapping.invalidate_lock &ei->i_es_lock key#2 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &base->lock &obj_hash[i].lock irq_context: 0 &port->mutex irq_context: 0 &tty->ldisc_sem irq_context: 0 &tty->ldisc_sem &tty->termios_rwsem irq_context: 0 &tty->ldisc_sem &mm->mmap_lock irq_context: 0 &tty->ldisc_sem &tty->termios_rwsem irq_context: 0 &tty->ldisc_sem &tty->termios_rwsem &port->mutex irq_context: 0 &tty->ldisc_sem &tty->termios_rwsem &tty->ldisc_sem &tty->write_wait irq_context: 0 &tty->ldisc_sem &tty->termios_rwsem &tty->ldisc_sem &tty->read_wait irq_context: 0 task_group_lock irq_context: 0 &sighand->siglock &p->pi_lock irq_context: 0 &sighand->siglock &p->pi_lock &rq->__lock irq_context: 0 &sighand->siglock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 lock#4 irq_context: 0 &type->s_umount_key#27/1 irq_context: 0 &type->s_umount_key#27/1 fs_reclaim irq_context: 0 &type->s_umount_key#27/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#27/1 pool_lock#2 irq_context: 0 &type->s_umount_key#27/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#27/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#27/1 shrinker_mutex irq_context: 0 &type->s_umount_key#27/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#27/1 sb_lock irq_context: 0 &type->s_umount_key#27/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#27/1 &____s->seqcount irq_context: 0 &type->s_umount_key#27/1 &c->lock irq_context: 0 &type->s_umount_key#27/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_lock_key#23 irq_context: 0 &type->s_umount_key#27/1 &s->s_inode_list_lock irq_context: 0 
&type->s_umount_key#27/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 fs_reclaim irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 pool_lock#2 irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 &dentry->d_lock irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#27/1 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 &type->s_umount_key#27/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_lock_key#23 irq_context: 0 &sb->s_type->i_mutex_key#9 irq_context: 0 &sb->s_type->i_mutex_key#9 rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 proc_subdir_lock irq_context: 0 &sb->s_type->i_mutex_key#9 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 irq_context: 0 &sb->s_type->i_mutex_key#9 &s->s_inode_list_lock irq_context: 0 &sb->s_type->i_mutex_key#9 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq#2 irq_context: 0 tomoyo_ss rcu_read_lock rename_lock.seqcount irq_context: 0 &p->lock irq_context: 0 &p->lock fs_reclaim irq_context: 0 &p->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock pool_lock#2 irq_context: 0 &p->lock &mm->mmap_lock irq_context: 0 &type->s_umount_key#28/1 irq_context: 0 &type->s_umount_key#28/1 fs_reclaim irq_context: 0 &type->s_umount_key#28/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#28/1 pool_lock#2 irq_context: 0 &type->s_umount_key#28/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#28/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#28/1 shrinker_mutex irq_context: 0 &type->s_umount_key#28/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#28/1 &c->lock irq_context: 0 &type->s_umount_key#28/1 &____s->seqcount irq_context: 0 &type->s_umount_key#28/1 sb_lock irq_context: 0 &type->s_umount_key#28/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem inode_hash_lock irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem fs_reclaim irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem &c->lock irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem &____s->seqcount irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem 
pool_lock#2 irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#24 irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_rwsem &sb->s_type->i_lock_key#24 irq_context: 0 &type->s_umount_key#28/1 &sb->s_type->i_lock_key#24 irq_context: 0 &type->s_umount_key#28/1 &sb->s_type->i_lock_key#24 &dentry->d_lock irq_context: 0 &type->s_umount_key#28/1 crngs.lock irq_context: 0 &type->s_umount_key#28/1 &root->kernfs_supers_rwsem irq_context: 0 &type->s_umount_key#28/1 &dentry->d_lock irq_context: 0 &root->kernfs_iattr_rwsem irq_context: 0 &type->i_mutex_dir_key#4 irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem inode_hash_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#24 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sb->s_type->i_lock_key#24 irq_context: 0 &type->i_mutex_dir_key#4 mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 iattr_mutex irq_context: 0 &type->i_mutex_dir_key#4 &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 irq_context: 0 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 &dentry->d_lock &wq irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &wq irq_context: 0 &ent->pde_unload_lock irq_context: 0 &p->lock file_systems_lock irq_context: 0 namespace_sem mount_lock mount_lock.seqcount &new_ns->poll irq_context: 0 namespace_sem mount_lock mount_lock.seqcount &dentry->d_lock irq_context: 0 namespace_sem mount_lock mount_lock.seqcount rcu_read_lock &dentry->d_lock irq_context: 0 namespace_sem mount_lock mount_lock.seqcount &obj_hash[i].lock irq_context: 0 namespace_sem mount_lock mount_lock.seqcount pool_lock#2 irq_context: 0 &type->s_umount_key#29 irq_context: 0 &type->s_umount_key#29 &x->wait#23 irq_context: 0 &type->s_umount_key#29 shrinker_mutex irq_context: 0 &type->s_umount_key#29 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#29 pool_lock#2 irq_context: 0 &type->s_umount_key#29 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#29 rcu_read_lock &dentry->d_lock &lru->node[i].lock irq_context: 0 &type->s_umount_key#29 rename_lock.seqcount irq_context: 0 
&type->s_umount_key#29 &dentry->d_lock irq_context: 0 &type->s_umount_key#29 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->s_umount_key#29 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock irq_context: 0 &type->s_umount_key#29 &sb->s_type->i_lock_key#23 irq_context: 0 &type->s_umount_key#29 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#29 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#29 &fsnotify_mark_srcu irq_context: 0 &type->s_umount_key#29 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#29 &dentry->d_lock pool_lock#2 irq_context: 0 &type->s_umount_key#29 &dentry->d_lock/1 irq_context: 0 unnamed_dev_ida.xa_lock irq_context: 0 krc.lock irq_context: 0 &x->wait#25 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 mapping.invalidate_lock &pcp->lock &zone->lock irq_context: 0 mapping.invalidate_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &net->unx.table.locks[i] irq_context: 0 &sb->s_type->i_lock_key#8 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#10 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->unx.table.locks[i] irq_context: 0 &sb->s_type->i_mutex_key#10 &u->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &u->lock clock-AF_UNIX irq_context: 0 &sb->s_type->i_mutex_key#10 &u->peer_wait irq_context: 0 &sb->s_type->i_mutex_key#10 rlock-AF_UNIX irq_context: 0 &sb->s_type->i_mutex_key#10 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 &dir->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &obj_hash[i].lock irq_context: 0 &dentry->d_lock/1 irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &c->lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 &sig->cred_guard_mutex &fs->lock irq_context: 0 &sig->cred_guard_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &____s->seqcount#3 irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &____s->seqcount#3 irq_context: 0 &sig->cred_guard_mutex lock#4 irq_context: 0 &sig->cred_guard_mutex &sb->s_type->i_mutex_key#8 irq_context: 0 &sig->cred_guard_mutex &p->alloc_lock irq_context: 0 &sig->cred_guard_mutex &p->alloc_lock &x->wait#25 irq_context: 0 &sig->cred_guard_mutex &p->alloc_lock &x->wait#25 &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &p->alloc_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &p->alloc_lock cpu_asid_lock irq_context: 0 &sig->wait_chldexit irq_context: 0 tasklist_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock irq_context: 0 &mm->mmap_lock &p->alloc_lock irq_context: 0 &mm->mmap_lock lock#4 irq_context: 0 &mm->mmap_lock 
lock#4 &lruvec->lru_lock irq_context: 0 &mm->mmap_lock lock#5 irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &____s->seqcount irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem pool_lock#2 irq_context: 0 &mm->mmap_lock &lruvec->lru_lock irq_context: 0 tasklist_lock &sighand->siglock &____s->seqcount irq_context: 0 tasklist_lock &sighand->siglock &c->lock irq_context: 0 tasklist_lock &sighand->siglock pool_lock#2 irq_context: 0 rcu_read_lock &____s->seqcount#4 irq_context: 0 &prev->lock irq_context: 0 &(&sig->stats_lock)->lock irq_context: 0 &(&sig->stats_lock)->lock &____s->seqcount#4 irq_context: 0 sb_writers#3 irq_context: 0 sb_writers#3 mount_lock irq_context: 0 &type->i_mutex_dir_key#3 rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#4 mount_lock irq_context: 0 sb_writers#4 tk_core.seq.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_lock_key#23 irq_context: 0 sb_writers#4 &wb->list_lock irq_context: 0 sb_writers#4 &wb->list_lock &sb->s_type->i_lock_key#23 irq_context: 0 &sb->s_type->i_mutex_key#9 &p->alloc_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &pid->lock irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq irq_context: 0 &p->alloc_lock &fs->lock &dentry->d_lock irq_context: 0 &p->lock namespace_sem irq_context: 0 &p->lock namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &p->lock namespace_sem rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 &type->s_umount_key#30 irq_context: 0 &type->s_umount_key#30 &lru->node[i].lock irq_context: 0 &type->s_umount_key#30 &dentry->d_lock irq_context: 0 &type->s_umount_key#30 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->s_umount_key#30 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 &dentry->d_lock pool_lock#2 irq_context: 0 &type->s_umount_key#30 &sb->s_type->i_lock_key#22 irq_context: 0 &type->s_umount_key#30 &sb->s_type->i_lock_key#22 &lru->node[i].lock irq_context: 0 &type->s_umount_key#30 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 &type->s_umount_key#30 &sbi->s_writepages_rwsem irq_context: 0 &type->s_umount_key#30 &sbi->s_writepages_rwsem &rsp->gp_wait irq_context: 0 &type->s_umount_key#30 &sbi->s_writepages_rwsem &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 &sbi->s_writepages_rwsem &x->wait#3 irq_context: 0 &type->s_umount_key#30 &sbi->s_writepages_rwsem &rq->__lock irq_context: 0 &type->s_umount_key#30 &sbi->s_writepages_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback put_task_map-wait-type-override task_group_lock irq_context: softirq rcu_callback &x->wait#3 irq_context: softirq rcu_callback &x->wait#3 &p->pi_lock irq_context: softirq rcu_callback &x->wait#3 &p->pi_lock &rq->__lock irq_context: softirq rcu_callback &x->wait#3 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 &sem->waiters irq_context: 0 &type->s_umount_key#30 &rsp->gp_wait irq_context: 0 &type->s_umount_key#30 &rsp->gp_wait &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 &rsp->gp_wait pool_lock#2 irq_context: 0 &type->s_umount_key#30 &journal->j_state_lock irq_context: 0 &type->s_umount_key#30 &p->alloc_lock irq_context: 0 &type->s_umount_key#30 (work_completion)(&sbi->s_sb_upd_work) irq_context: 0 &type->s_umount_key#30 
&journal->j_state_lock irq_context: 0 &type->s_umount_key#30 key#3 irq_context: 0 &type->s_umount_key#30 key#4 irq_context: 0 &type->s_umount_key#30 &sbi->s_error_lock irq_context: 0 &type->s_umount_key#30 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#30 pool_lock#2 irq_context: 0 &type->s_umount_key#30 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 &base->lock irq_context: 0 &type->s_umount_key#30 &base->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->s_umount_key#30 bit_wait_table + i irq_context: 0 &type->s_umount_key#30 &rq->__lock irq_context: 0 &type->s_umount_key#30 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 ext4_li_mtx irq_context: 0 &type->s_umount_key#30 ext4_li_mtx fs_reclaim irq_context: 0 &type->s_umount_key#30 ext4_li_mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#30 ext4_li_mtx pool_lock#2 irq_context: 0 &type->s_umount_key#30 ext4_li_mtx batched_entropy_u16.lock irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &eli->li_list_mtx irq_context: 0 &type->s_umount_key#30 ext4_li_mtx kthread_create_lock irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &p->pi_lock irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &x->wait irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &rq->__lock irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 ext4_li_mtx &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 (console_sem).lock irq_context: 0 &eli->li_list_mtx irq_context: 0 &type->s_umount_key#30 console_lock console_srcu console_owner_lock irq_context: 0 &type->s_umount_key#30 console_lock console_srcu console_owner irq_context: 0 &type->s_umount_key#30 console_lock console_srcu console_owner &port_lock_key irq_context: 0 &type->s_umount_key#30 console_lock console_srcu console_owner console_owner_lock irq_context: 0 &type->s_umount_key#30 mount_lock irq_context: 0 &type->s_umount_key#30 mount_lock mount_lock.seqcount irq_context: 0 &type->s_umount_key#30 mount_lock mount_lock.seqcount &new_ns->poll irq_context: 0 rcu_read_lock &pid->lock irq_context: 0 rcu_read_lock &sb->s_type->i_lock_key#23 irq_context: 0 &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 rename_lock.seqcount irq_context: 0 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock irq_context: 0 &pid->lock irq_context: 0 sb_writers#3 tk_core.seq.seqcount irq_context: 0 sb_writers#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &____s->seqcount irq_context: 0 sb_writers#3 &c->lock irq_context: 0 sb_writers#3 pool_lock#2 irq_context: 0 sb_writers#3 &journal->j_state_lock irq_context: 0 sb_writers#3 &journal->j_state_lock irq_context: 0 sb_writers#3 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &journal->j_state_lock &base->lock irq_context: 0 sb_writers#3 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 jbd2_handle irq_context: 0 sb_writers#3 jbd2_handle &mapping->i_private_lock irq_context: 0 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 jbd2_handle &____s->seqcount irq_context: 0 sb_writers#3 jbd2_handle &c->lock irq_context: 0 sb_writers#3 
jbd2_handle pool_lock#2 irq_context: 0 sb_writers#3 jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_revoke_lock irq_context: 0 sb_writers#3 jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &wb->list_lock irq_context: 0 sb_writers#3 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &wb->work_lock irq_context: 0 sb_writers#3 &wb->work_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &wb->work_lock &base->lock irq_context: 0 sb_writers#3 &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &ei->xattr_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &ei->xattr_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &xa->xa_lock#9 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 lock#4 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock &nvmeq->sq_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 bit_wait_table + i irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &journal->j_state_lock 
irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_revoke_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &meta_group_info[i]->alloc_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle inode_hash_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle batched_entropy_u32.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem &journal->j_revoke_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle smack_known_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle smack_known_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle smack_known_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_wait_updates irq_context: 0 sb_internal rcu_read_lock init_fs.seq.seqcount irq_context: 0 sb_internal rcu_read_lock rcu_read_lock mount_lock.seqcount irq_context: 0 sb_internal rcu_read_lock rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_internal mmu_notifier_invalidate_range_start irq_context: 0 sb_internal &____s->seqcount irq_context: 0 sb_internal pool_lock#2 irq_context: 
0 sb_internal &journal->j_state_lock irq_context: 0 sb_internal jbd2_handle irq_context: 0 sb_internal jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_internal jbd2_handle pool_lock#2 irq_context: 0 sb_internal jbd2_handle &ret->b_state_lock irq_context: 0 sb_internal jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_internal jbd2_handle &journal->j_revoke_lock irq_context: 0 sb_internal jbd2_handle &mapping->i_private_lock irq_context: 0 sb_internal jbd2_handle &journal->j_wait_updates irq_context: 0 sb_internal &obj_hash[i].lock irq_context: 0 &ei->i_data_sem irq_context: 0 &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 &sighand->siglock hrtimer_bases.lock irq_context: 0 &sighand->siglock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 &sighand->siglock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 file_rwsem irq_context: 0 file_rwsem &ctx->flc_lock irq_context: 0 file_rwsem &ctx->flc_lock &fll->lock irq_context: 0 &ctx->flc_lock irq_context: 0 &sig->cred_guard_mutex &c->lock irq_context: 0 &sig->cred_guard_mutex &____s->seqcount irq_context: 0 &sig->cred_guard_mutex tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 mount_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &____s->seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &c->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 pool_lock#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &____s->seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &c->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle pool_lock#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &ret->b_state_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_revoke_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &ei->i_raw_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_wait_updates irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &sb->s_type->i_lock_key#22 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &wb->list_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss quarantine_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &mapping->i_private_lock irq_context: 0 &mm->mmap_lock tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock sb_writers#3 mount_lock irq_context: 0 &mm->mmap_lock sb_writers#3 tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock sb_writers#3 mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock sb_writers#3 batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock sb_writers#3 kfence_freelist_lock irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 
&mm->mmap_lock sb_writers#3 jbd2_handle &ei->i_raw_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_wait_updates irq_context: 0 &mm->mmap_lock sb_writers#3 &obj_hash[i].lock irq_context: 0 &mm->mmap_lock sb_writers#3 &meta->lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_es_lock key#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 &p->lock &c->lock irq_context: 0 &p->lock &pcp->lock &zone->lock irq_context: 0 &p->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &p->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: softirq rcu_callback &rsp->gp_wait irq_context: softirq rcu_callback &cfs_rq->removed.lock irq_context: softirq rcu_callback &rq->__lock irq_context: 0 rcu_read_lock &p->alloc_lock irq_context: 0 &type->s_umount_key#31/1 irq_context: 0 &type->s_umount_key#31/1 fs_reclaim irq_context: 0 &type->s_umount_key#31/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#31/1 pool_lock#2 irq_context: 0 &type->s_umount_key#31/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#31/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#31/1 shrinker_mutex irq_context: 0 &type->s_umount_key#31/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#31/1 sb_lock irq_context: 0 &type->s_umount_key#31/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#31/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_lock_key#25 irq_context: 0 &type->s_umount_key#31/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#31/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_lock_key#25 &dentry->d_lock irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 fs_reclaim irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 pool_lock#2 irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 &dentry->d_lock irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#31/1 
&sb->s_type->i_mutex_key#11 &sb->s_type->i_lock_key#25 irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#31/1 &sb->s_type->i_mutex_key#11 &sb->s_type->i_lock_key#25 &dentry->d_lock irq_context: 0 &type->s_umount_key#31/1 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#2 irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem fs_reclaim irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem rename_lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem rename_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock mount_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock mount_lock.seqcount &new_ns->poll irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock mount_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock mount_lock.seqcount rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem mount_lock mount_lock.seqcount &obj_hash[i].lock irq_context: 0 &type->s_umount_key/1 fs_reclaim irq_context: 0 &type->s_umount_key/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key/1 &obj_hash[i].lock pool_lock irq_context: 0 &type->s_umount_key/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 &c->lock irq_context: 0 &type->i_mutex_dir_key#4 &____s->seqcount irq_context: 0 &type->s_umount_key#32 irq_context: 0 &type->s_umount_key#32 sb_lock irq_context: 0 &type->s_umount_key#32 fs_reclaim irq_context: 0 &type->s_umount_key#32 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#32 pool_lock#2 irq_context: 0 &type->s_umount_key#32 &dentry->d_lock irq_context: 0 &type->s_umount_key#32 &c->lock irq_context: 0 &type->s_umount_key#32 &____s->seqcount irq_context: 0 &type->s_umount_key#32 &lru->node[i].lock irq_context: 0 &type->s_umount_key#32 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#32 &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem fs_reclaim irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem rename_lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem rename_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock mount_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock mount_lock.seqcount 
&new_ns->poll irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock mount_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock mount_lock.seqcount rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 namespace_sem mount_lock mount_lock.seqcount &obj_hash[i].lock irq_context: 0 &type->s_umount_key#33 irq_context: 0 &type->s_umount_key#33 sb_lock irq_context: 0 &type->s_umount_key#33 &dentry->d_lock irq_context: 0 &type->s_umount_key#34 irq_context: 0 &type->s_umount_key#34 sb_lock irq_context: 0 &type->s_umount_key#34 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 sysctl_lock irq_context: 0 tomoyo_ss &rq->__lock irq_context: 0 rcu_read_lock &dentry->d_lock sysctl_lock irq_context: 0 &type->s_umount_key#35/1 irq_context: 0 &type->s_umount_key#35/1 fs_reclaim irq_context: 0 &type->s_umount_key#35/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#35/1 pool_lock#2 irq_context: 0 &type->s_umount_key#35/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#35/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#35/1 shrinker_mutex irq_context: 0 &type->s_umount_key#35/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#35/1 sb_lock irq_context: 0 &type->s_umount_key#35/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#35/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#35/1 &sb->s_type->i_lock_key#26 irq_context: 0 &type->s_umount_key#35/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#35/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#35/1 &sb->s_type->i_lock_key#26 &dentry->d_lock irq_context: 0 &type->s_umount_key#35/1 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem rename_lock irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem rename_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock mount_lock.seqcount &new_ns->poll irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock mount_lock.seqcount &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock mount_lock.seqcount rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 namespace_sem mount_lock mount_lock.seqcount &obj_hash[i].lock irq_context: 0 &type->s_umount_key#36 irq_context: 0 &type->s_umount_key#36 sb_lock irq_context: 0 &type->s_umount_key#36 &dentry->d_lock irq_context: 0 redirect_lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock fs_reclaim irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock pool_lock#2 irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &mm->mmap_lock irq_context: 0 &tty->ldisc_sem 
&tty->atomic_write_lock &tty->termios_rwsem irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &tty->write_wait irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &rq->__lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &port_lock_key irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &port_lock_key &port->lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &port_lock_key &tty->write_wait irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &port_lock_key &tty->write_wait &p->pi_lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dev->power.lock hrtimer_bases.lock irq_context: 0 (wq_completion)pm (work_completion)(&dev->power.work) &dev->power.lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &port->lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &tty->write_wait irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &port_lock_key irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &port_lock_key &dev->power.lock irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->files_lock irq_context: 0 &tty->ldisc_sem &tty->write_wait irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &c->lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &____s->seqcount irq_context: 0 &type->s_umount_key#37/1 irq_context: 0 &type->s_umount_key#37/1 fs_reclaim irq_context: 0 &type->s_umount_key#37/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#37/1 pool_lock#2 irq_context: 0 &type->s_umount_key#37/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#37/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#37/1 shrinker_mutex irq_context: 0 &type->s_umount_key#37/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#37/1 sb_lock irq_context: 0 &type->s_umount_key#37/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 
&type->s_umount_key#37/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#37/1 &sb->s_type->i_lock_key#27 irq_context: 0 &type->s_umount_key#37/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#37/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#37/1 &sb->s_type->i_lock_key#27 &dentry->d_lock irq_context: 0 &type->s_umount_key#37/1 fuse_mutex irq_context: 0 &type->s_umount_key#37/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#38/1 irq_context: 0 &type->s_umount_key#38/1 fs_reclaim irq_context: 0 &type->s_umount_key#38/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#38/1 pool_lock#2 irq_context: 0 &type->s_umount_key#38/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#38/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#38/1 shrinker_mutex irq_context: 0 &type->s_umount_key#38/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#38/1 sb_lock irq_context: 0 &type->s_umount_key#38/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#38/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_lock_key#28 irq_context: 0 &type->s_umount_key#38/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#38/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_lock_key#28 &dentry->d_lock irq_context: 0 &type->s_umount_key#38/1 pstore_sb_lock irq_context: 0 &type->s_umount_key#38/1 pstore_sb_lock &sb->s_type->i_mutex_key#12 irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 fs_reclaim irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &zone->lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &____s->seqcount irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex (efivars_lock).lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex fs_reclaim irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex pool_lock#2 irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex (efi_runtime_lock).lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex &obj_hash[i].lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex rcu_read_lock &pool->lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex &x->wait#12 irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex &rq->__lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &psinfo->read_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#38/1 &sb->s_type->i_mutex_key#12 
&zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#38/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#39/1 irq_context: 0 &type->s_umount_key#39/1 fs_reclaim irq_context: 0 &type->s_umount_key#39/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#39/1 pool_lock#2 irq_context: 0 &type->s_umount_key#39/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#39/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#39/1 shrinker_mutex irq_context: 0 &type->s_umount_key#39/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#39/1 sb_lock irq_context: 0 &type->s_umount_key#39/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#39/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#39/1 &sb->s_type->i_lock_key#29 irq_context: 0 &type->s_umount_key#39/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#39/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#39/1 &____s->seqcount irq_context: 0 &type->s_umount_key#39/1 &sb->s_type->i_lock_key#29 &dentry->d_lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock (kmod_concurrent_max).lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock fs_reclaim irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock pool_lock#2 irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock rcu_read_lock &pool->lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock &x->wait#17 irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock &rq->__lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock &____s->seqcount irq_context: 0 uts_sem irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock key irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock pcpu_lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock percpu_counters_lock irq_context: 0 &type->s_umount_key#39/1 bpf_preload_lock running_helpers_waitq.lock irq_context: 0 &type->s_umount_key#39/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#14 irq_context: 0 &type->s_umount_key#14 sb_lock irq_context: 0 &type->s_umount_key#14 fs_reclaim irq_context: 0 &type->s_umount_key#14 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#14 pool_lock#2 irq_context: 0 &type->s_umount_key#14 &dentry->d_lock irq_context: 0 &type->s_umount_key#14 &lru->node[i].lock irq_context: 0 &type->s_umount_key#14 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#14 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_lock_key irq_context: 0 &type->i_mutex_dir_key#5 irq_context: 0 &type->i_mutex_dir_key#5 fs_reclaim irq_context: 0 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock rename_lock.seqcount irq_context: 0 
&type->i_mutex_dir_key#5 &dentry->d_lock &wq irq_context: 0 sb_writers#5 irq_context: 0 sb_writers#5 mount_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &dentry->d_lock &wq#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &sbinfo->stat_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &sb->s_type->i_lock_key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &s->s_inode_list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 batched_entropy_u32.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &xattrs->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &xattrs->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &sb->s_type->i_lock_key &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss 
tomoyo_policy_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sbinfo->stat_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_lock_key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &s->s_inode_list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 batched_entropy_u32.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &xattrs->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &xattrs->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_lock_key &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &____s->seqcount irq_context: 0 sb_writers#5 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_lock_key irq_context: 0 sb_writers#5 &wb->list_lock irq_context: 0 sb_writers#5 &wb->list_lock &sb->s_type->i_lock_key irq_context: 0 &vma->vm_lock->lock &mm->page_table_lock irq_context: softirq (&cb->timer) irq_context: softirq (&cb->timer) &obj_hash[i].lock irq_context: softirq (&cb->timer) &base->lock irq_context: softirq (&cb->timer) &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock sb_writers#3 pool_lock#2 irq_context: 0 &f->f_lock irq_context: 0 &mm->mmap_lock batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock kfence_freelist_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &dentry->d_lock &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &dentry->d_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &meta->lock irq_context: 0 &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &rq->__lock irq_context: 0 uts_sem irq_context: 0 uts_sem hostname_poll.wait.lock irq_context: hardirq bit_wait_table + i &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_es_lock key#5 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock key#6 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock &c->lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock rcu_read_lock &____s->seqcount irq_context: 0 rcu_read_lock rcu_read_lock mount_lock.seqcount irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 &f->f_pos_lock irq_context: 0 &f->f_pos_lock 
&type->i_mutex_dir_key#3 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_es_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_es_lock key#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mount_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &ei->i_raw_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_wait_updates irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &sb->s_type->i_lock_key#22 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &wb->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 &f->f_pos_lock &mm->mmap_lock irq_context: 0 &fs->lock &dentry->d_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mm->page_table_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock ptlock_ptr(ptdesc)#2 rcu_read_lock ptlock_ptr(ptdesc)#2/1 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 &rq->__lock cpu_asid_lock irq_context: softirq (&net->can.stattimer) irq_context: softirq (&net->can.stattimer) &obj_hash[i].lock irq_context: softirq (&net->can.stattimer) &base->lock irq_context: softirq (&net->can.stattimer) &base->lock &obj_hash[i].lock irq_context: softirq mm/vmstat.c:2022 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex &mm->mmap_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock 
&mm->mmap_lock lock#4 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#4 &lruvec->lru_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#4 &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#5 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &____s->seqcount irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &vma->vm_lock->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#4 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#4 &lruvec->lru_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock lock#5 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &lruvec->lru_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock key irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock pcpu_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock percpu_counters_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock pool_lock#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#5 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 &type->i_mutex_dir_key#5 rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock &wq#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &xattrs->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &mm->mmap_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 fs_reclaim irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 fs_reclaim mmu_notifier_invalidate_range_start 
irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &____s->seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &xa->xa_lock#9 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &sb->s_type->i_lock_key irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &info->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 lock#4 irq_context: 0 &p->alloc_lock &x->wait#25 irq_context: 0 &p->alloc_lock &x->wait#25 &p->pi_lock irq_context: 0 &p->alloc_lock &x->wait#25 &p->pi_lock &rq->__lock irq_context: 0 &p->alloc_lock &x->wait#25 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock &rq->__lock irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sighand->siglock &obj_hash[i].lock irq_context: 0 &sighand->siglock pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &u->bindlock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &u->bindlock &net->unx.table.locks[i] irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &u->bindlock &net->unx.table.locks[i] &net->unx.table.locks[i]/1 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &u->bindlock &net->unx.table.locks[i] &net->unx.table.locks[i]/1 &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &u->bindlock &net->unx.table.locks[i]/1 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &u->bindlock &bsd_socket_locks[i] irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &c->lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &____s->seqcount irq_context: 0 &u->iolock irq_context: 0 &u->iolock rlock-AF_UNIX irq_context: 0 &ei->socket.wq.wait irq_context: 0 &vma->vm_lock->lock ptlock_ptr(ptdesc)#2 irq_context: 0 key#8 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &wb->list_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &wb->list_lock &sb->s_type->i_lock_key irq_context: 0 &sig->cred_guard_mutex &stopper->lock irq_context: 0 &sig->cred_guard_mutex &stop_pi_lock 
irq_context: 0 &sig->cred_guard_mutex &stop_pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &stop_pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &x->wait#8 irq_context: 0 &sig->cred_guard_mutex &p->alloc_lock &x->wait#25 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &bsd_socket_locks[i] irq_context: 0 sb_writers tk_core.seq.seqcount irq_context: 0 sb_writers &sb->s_type->i_lock_key#5 irq_context: 0 sb_writers &wb->list_lock irq_context: 0 sb_writers &wb->list_lock &sb->s_type->i_lock_key#5 irq_context: 0 &u->lock irq_context: 0 &u->lock &u->lock/1 irq_context: 0 &u->lock/1 irq_context: 0 &u->lock rlock-AF_UNIX irq_context: 0 rcu_read_lock &ei->socket.wq.wait irq_context: 0 rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 syslog_lock irq_context: 0 &u->lock &u->peer_wait irq_context: 0 &u->iolock &u->peer_wait irq_context: 0 &u->iolock &u->peer_wait &p->pi_lock irq_context: 0 &u->iolock &u->peer_wait &p->pi_lock &rq->__lock irq_context: 0 &u->iolock &u->peer_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &u->iolock &mm->mmap_lock irq_context: 0 &u->iolock &obj_hash[i].lock irq_context: 0 &u->iolock pool_lock#2 irq_context: 0 &u->iolock &rq->__lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &u->iolock quarantine_lock irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_lock_key#14 irq_context: 0 &sb->s_type->i_lock_key#14 &dentry->d_lock irq_context: 0 &pipe->mutex/1 irq_context: 0 &pipe->rd_wait irq_context: 0 &u->peer_wait irq_context: softirq (&cb->timer) tk_core.seq.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rename_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &xa->xa_lock#9 &c->lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &xa->xa_lock#9 &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &sem->wait_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock 
&mm->mmap_lock &anon_vma->rwsem &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &sem->wait_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &sem->wait_lock irq_context: 0 cgroup_threadgroup_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#5 irq_context: 0 &vma->vm_lock->lock &pcp->lock &zone->lock irq_context: 0 &vma->vm_lock->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem &mapping->i_private_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#5 pool_lock#2 irq_context: 0 &u->lock clock-AF_UNIX irq_context: 0 rlock-AF_UNIX irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &obj_hash[i].lock pool_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_node_0 irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &pipe->rd_wait irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &p->pi_lock irq_context: 0 &pipe->mutex/1 &pipe->wr_wait irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &p->pi_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 fs_reclaim irq_context: 0 &pipe->mutex/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &pipe->mutex/1 &____s->seqcount irq_context: 0 &pipe->mutex/1 &mm->mmap_lock irq_context: 0 &pipe->rd_wait &p->pi_lock irq_context: 0 sb_writers#6 tk_core.seq.seqcount irq_context: 0 sb_writers#6 mount_lock irq_context: 0 &pipe->mutex/1 &rq->__lock irq_context: 0 &pipe->mutex/1 &lock->wait_lock irq_context: 0 &pipe->mutex/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &lock->wait_lock irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 batched_entropy_u8.lock irq_context: 0 sb_writers#3 batched_entropy_u8.lock crngs.lock irq_context: 0 sb_writers#3 kfence_freelist_lock irq_context: 0 sb_writers#3 &meta->lock irq_context: 0 sb_writers#4 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rename_lock.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 
rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &dentry->d_lock sysctl_lock irq_context: 0 sb_writers#4 sysctl_lock irq_context: 0 sb_writers#4 &dentry->d_lock irq_context: 0 sb_writers#4 tomoyo_ss irq_context: 0 sb_writers#4 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 tomoyo_ss rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#4 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#4 tomoyo_ss &c->lock irq_context: 0 sb_writers#4 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 sb_writers#4 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#4 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#4 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#4 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#4 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tk_core.seq.seqcount irq_context: 0 sb_writers#4 fs_reclaim irq_context: 0 sb_writers#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 pool_lock#2 irq_context: 0 sb_writers#4 &mm->mmap_lock irq_context: 0 sb_writers#4 &obj_hash[i].lock irq_context: 0 mapping.invalidate_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &____s->seqcount irq_context: 0 &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock &wq irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock &____s->seqcount irq_context: 0 nl_table_lock pool_lock#2 irq_context: 0 sk_lock-AF_NETLINK irq_context: 0 sk_lock-AF_NETLINK slock-AF_NETLINK irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rhashtable_bucket irq_context: 0 slock-AF_NETLINK irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &u->bindlock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &u->bindlock &net->unx.table.locks[i] irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &u->bindlock &net->unx.table.locks[i] &net->unx.table.locks[i]/1 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &u->bindlock &net->unx.table.locks[i] &net->unx.table.locks[i]/1 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &u->bindlock &bsd_socket_locks[i] irq_context: 0 &u->lock &sk->sk_peer_lock irq_context: 0 &type->i_mutex_dir_key#3 &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: hardirq log_wait.lock &p->pi_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle pool_lock#2 irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &ret->b_state_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_revoke_lock irq_context: 0 &mm->mmap_lock sb_writers#3 &sb->s_type->i_lock_key#22 irq_context: 0 &mm->mmap_lock sb_writers#3 &wb->list_lock irq_context: 0 &mm->mmap_lock sb_writers#3 &wb->list_lock &sb->s_type->i_lock_key#22 
irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &type->i_mutex_dir_key#5 &c->lock irq_context: 0 &type->i_mutex_dir_key#5 &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &xa->xa_lock#9 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 lock#4 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mapping->i_private_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 bit_wait_table + i irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&krcp->monitor_work)->timer irq_context: softirq &(&krcp->monitor_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&krcp->monitor_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&krcp->monitor_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&krcp->monitor_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&krcp->monitor_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) krc.lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 batched_entropy_u8.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 kfence_freelist_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &ret->b_state_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_revoke_lock irq_context: softirq &(&tbl->managed_work)->timer irq_context: softirq &(&tbl->managed_work)->timer rcu_read_lock &pool->lock irq_context: softirq 
&(&tbl->managed_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&tbl->managed_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 mapping.invalidate_lock &ei->i_es_lock key#5 irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock key#6 irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 &xattrs->lock irq_context: 0 &mm->mmap_lock &lruvec->lru_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &dentry->d_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &s->s_inode_list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &p->alloc_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq#2 irq_context: 0 sb_writers#4 oom_adj_mutex irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &p->alloc_lock irq_context: 0 sb_writers#4 oom_adj_mutex &p->alloc_lock irq_context: 0 &sb->s_type->i_lock_key#15 &dentry->d_lock irq_context: 0 &group->mark_mutex irq_context: 0 &group->mark_mutex &fsnotify_mark_srcu irq_context: 0 &group->mark_mutex fs_reclaim irq_context: 0 &group->mark_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &group->mark_mutex &____s->seqcount irq_context: 0 &group->mark_mutex &c->lock irq_context: 0 &group->mark_mutex pool_lock#2 irq_context: 0 &group->mark_mutex lock irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock pool_lock#2 irq_context: 0 &group->mark_mutex ucounts_lock irq_context: 0 &group->mark_mutex &mark->lock irq_context: 0 &group->mark_mutex &mark->lock &fsnotify_mark_srcu irq_context: 0 &group->mark_mutex &mark->lock &fsnotify_mark_srcu &conn->lock irq_context: 0 &group->mark_mutex &mark->lock &conn->lock irq_context: 0 &group->mark_mutex &conn->lock irq_context: 0 &group->mark_mutex &sb->s_type->i_lock_key#22 irq_context: 0 &group->mark_mutex &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 &group->mark_mutex &sb->s_type->i_lock_key#22 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &group->mark_mutex &sb->s_type->i_lock_key irq_context: 0 &group->mark_mutex &sb->s_type->i_lock_key &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 
&type->i_mutex_dir_key#5/1 &dentry->d_lock pool_lock#2 irq_context: 0 &sk->sk_peer_lock irq_context: 0 &ep->mtx irq_context: 0 epnested_mutex irq_context: 0 epnested_mutex &ep->mtx irq_context: 0 epnested_mutex &ep->mtx fs_reclaim irq_context: 0 epnested_mutex &ep->mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 epnested_mutex &ep->mtx &____s->seqcount irq_context: 0 epnested_mutex &ep->mtx &c->lock irq_context: 0 epnested_mutex &ep->mtx pool_lock#2 irq_context: 0 epnested_mutex &ep->mtx &f->f_lock irq_context: 0 epnested_mutex &ep->mtx &ei->socket.wq.wait irq_context: 0 epnested_mutex &ep->mtx &ep->lock irq_context: 0 epnested_mutex rcu_read_lock &f->f_lock irq_context: 0 &ep->mtx fs_reclaim irq_context: 0 &ep->mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ep->mtx &f->f_lock irq_context: 0 &ep->mtx pool_lock#2 irq_context: 0 &ep->mtx &group->notification_waitq irq_context: 0 &ep->mtx &group->notification_lock irq_context: 0 &ep->mtx &ep->lock irq_context: 0 &ep->mtx &sighand->signalfd_wqh irq_context: 0 &ep->mtx &sighand->siglock irq_context: 0 &ep->mtx &ei->socket.wq.wait irq_context: 0 &ep->lock irq_context: hardirq &dev->power.lock hrtimer_bases.lock irq_context: hardirq log_wait.lock &p->pi_lock &rq->__lock irq_context: hardirq &dev->power.lock hrtimer_bases.lock &obj_hash[i].lock irq_context: hardirq log_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss remove_cache_srcu irq_context: 0 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 tomoyo_ss &n->list_lock irq_context: 0 tomoyo_ss &n->list_lock &c->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &obj_hash[i].lock pool_lock irq_context: 0 &type->i_mutex_dir_key#4 rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 &dentry->d_lock &wq#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 vmap_area_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &dentry->d_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock fs_reclaim irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 
&lruvec->lru_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key#4 &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#4 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 remove_cache_srcu &c->lock irq_context: 0 remove_cache_srcu &n->list_lock irq_context: 0 remove_cache_srcu &obj_hash[i].lock irq_context: 0 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#4 &n->list_lock irq_context: 0 &type->i_mutex_dir_key#4 &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 sb_writers#7 mount_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 sb_writers#7 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 sb_writers#7 &sb->s_type->i_lock_key#24 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 sb_writers#7 &wb->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 sb_writers#7 &wb->list_lock &sb->s_type->i_lock_key#24 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_callback &pcp->lock &zone->lock irq_context: softirq rcu_callback &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#4 batched_entropy_u8.lock irq_context: 0 &type->i_mutex_dir_key#4 kfence_freelist_lock irq_context: 0 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: softirq rcu_callback &zone->lock irq_context: softirq rcu_callback &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#4 quarantine_lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu quarantine_lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &c->lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &n->list_lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: softirq (&vblank->disable_timer) irq_context: softirq (&vblank->disable_timer) &dev->vbl_lock irq_context: softirq (&vblank->disable_timer) &dev->vbl_lock &dev->vblank_time_lock irq_context: softirq (&vblank->disable_timer) &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock irq_context: softirq (&vblank->disable_timer) &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 swap_lock irq_context: 0 sb_writers#7 irq_context: 0 sb_writers#7 mount_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rename_lock.seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 fs_reclaim irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &dentry->d_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem inode_hash_lock 
irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#24 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sb->s_type->i_lock_key#24 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 iattr_mutex irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &obj_hash[i].lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 &dentry->d_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &sb->s_type->i_lock_key#24 &dentry->d_lock &wq#2 irq_context: 0 kn->active fs_reclaim irq_context: 0 kn->active fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active &c->lock irq_context: 0 kn->active pool_lock#2 irq_context: 0 kn->active &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active &kernfs_locks->open_file_mutex[count] pool_lock#2 irq_context: 0 sb_writers#7 fs_reclaim irq_context: 0 sb_writers#7 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 pool_lock#2 irq_context: 0 sb_writers#7 &mm->mmap_lock irq_context: 0 sb_writers#7 &of->mutex irq_context: 0 sb_writers#7 &of->mutex kn->active &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active 
uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active &obj_hash[i].lock irq_context: 0 sb_writers#7 &obj_hash[i].lock irq_context: 0 &kernfs_locks->open_file_mutex[count] irq_context: 0 &kernfs_locks->open_file_mutex[count] &obj_hash[i].lock irq_context: 0 &ep->mtx &mm->mmap_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] pool_lock#2 irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock &obj_hash[i].lock irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock &base->lock irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &____s->seqcount irq_context: 0 kn->active#2 fs_reclaim irq_context: 0 kn->active#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#2 &c->lock irq_context: 0 kn->active#2 &____s->seqcount irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#2 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#2 pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &obj_hash[i].lock irq_context: 0 rlock-AF_NETLINK irq_context: 0 &nlk->wait irq_context: 0 kn->active#2 &n->list_lock irq_context: 0 kn->active#2 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &c->lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 kn->active &n->list_lock irq_context: 0 kn->active &n->list_lock &c->lock irq_context: 0 kn->active &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 &kernfs_locks->open_file_mutex[count] &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#5 
&type->i_mutex_dir_key#5 tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &n->list_lock &c->lock irq_context: 0 sb_writers#7 remove_cache_srcu irq_context: 0 sb_writers#7 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#2 &pcp->lock &zone->lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &____s->seqcount irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active &____s->seqcount irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active quarantine_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu quarantine_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &n->list_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex &____s->seqcount irq_context: 0 kn->active &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 &c->lock irq_context: 0 &sb->s_type->i_mutex_key#9 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &c->lock irq_context: 0 sb_writers#7 &n->list_lock irq_context: 0 sb_writers#7 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex 
&pcp->lock &zone->lock irq_context: 0 kn->active#2 remove_cache_srcu irq_context: 0 kn->active#2 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#2 remove_cache_srcu &c->lock irq_context: 0 kn->active#2 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#2 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#2 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#2 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#4 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#4 tomoyo_ss &n->list_lock &c->lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &c->lock irq_context: 0 &vma->vm_lock->lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 quarantine_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &n->list_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#9 &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#9 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#5 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#5 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override pool_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &c->lock irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 remove_cache_srcu &rq->__lock irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active &pcp->lock &zone->lock irq_context: 0 kn->active &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock remove_cache_srcu irq_context: 0 &mm->mmap_lock remove_cache_srcu 
quarantine_lock irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) krc.lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock irq_context: 0 &ep->mtx &c->lock irq_context: 0 &ep->mtx &____s->seqcount irq_context: 0 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&q->timeout) irq_context: softirq (&q->timeout) rcu_read_lock &pool->lock irq_context: softirq (&q->timeout) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&q->timeout) rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&q->timeout) rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&q->timeout) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&q->timeout) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)kblockd irq_context: 0 (wq_completion)kblockd (work_completion)(&q->timeout_work) irq_context: 0 kn->active#2 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 kn->active#2 remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 kn->active#2 remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 fs_reclaim &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) krc.lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 quarantine_lock irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#5 &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#5 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex 
kn->active#2 uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex quarantine_lock irq_context: 0 &type->i_mutex_dir_key#5 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &sem->wait_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#5 &sem->wait_lock irq_context: 0 &type->i_mutex_dir_key#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sem->wait_lock irq_context: 0 sb_writers#5 &sem->wait_lock irq_context: 0 sb_writers#5 &p->pi_lock irq_context: 0 sb_writers#5 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] fs_reclaim &rq->__lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu quarantine_lock irq_context: 0 &ep->mtx &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#2 uevent_sock_mutex &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem pool_lock#2 irq_context: 0 kn->active#2 &rq->__lock irq_context: 0 sb_writers#7 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback quarantine_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem batched_entropy_u8.lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 batched_entropy_u8.lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 kfence_freelist_lock irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] remove_cache_srcu irq_context: 0 kn->active#2 &kernfs_locks->open_file_mutex[count] remove_cache_srcu quarantine_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] 
fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override &c->lock irq_context: 0 kn->active#3 fs_reclaim irq_context: 0 kn->active#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#3 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#3 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#3 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#3 pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &____s->seqcount irq_context: 0 kn->active#3 &c->lock irq_context: 0 kn->active#3 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#3 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 kn->active#3 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &n->list_lock &c->lock irq_context: 0 kn->active#3 &n->list_lock irq_context: 0 kn->active#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#3 remove_cache_srcu irq_context: 0 kn->active#3 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#3 remove_cache_srcu &c->lock irq_context: 0 kn->active#3 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 
0 kn->active#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#3 &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex kfence_freelist_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &n->list_lock irq_context: 0 kn->active#3 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#3 uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &n->list_lock irq_context: 0 &mm->mmap_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu irq_context: 0 &mm->mmap_lock remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 remove_cache_srcu irq_context: 0 sb_writers#3 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sb->s_type->i_lock_key#24 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu quarantine_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &c->lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &n->list_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu &c->lock irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu &n->list_lock irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#5 
remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock &wq &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock &wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock &wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#5 &dentry->d_lock pool_lock#2 irq_context: 0 sb_writers#7 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &sb->s_type->i_lock_key#24 irq_context: 0 sb_writers#7 &wb->list_lock irq_context: 0 sb_writers#7 &wb->list_lock &sb->s_type->i_lock_key#24 irq_context: 0 kn->active#4 fs_reclaim irq_context: 0 kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#4 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#4 pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 quarantine_lock irq_context: 0 kn->active#4 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &c->lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock 
irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#4 &n->list_lock irq_context: 0 kn->active#4 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &device->physical_node_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 udc_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 fw_lock irq_context: 0 sb_writers#7 quarantine_lock irq_context: 0 kn->active#4 remove_cache_srcu irq_context: 0 kn->active#4 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#4 remove_cache_srcu &c->lock irq_context: 0 kn->active#4 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#4 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#4 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 kn->active#4 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &rq->__lock irq_context: 0 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &rfkill->lock irq_context: softirq &(&ovs_net->masks_rebalance)->timer irq_context: softirq &(&ovs_net->masks_rebalance)->timer rcu_read_lock &pool->lock irq_context: softirq &(&ovs_net->masks_rebalance)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&ovs_net->masks_rebalance)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) ovs_mutex irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) &base->lock &obj_hash[i].lock irq_context: 0 kn->active#4 &pcp->lock &zone->lock irq_context: 0 kn->active#4 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &n->list_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &n->list_lock &c->lock 
irq_context: 0 &mm->mmap_lock &rq->__lock irq_context: 0 &u->bindlock irq_context: 0 &u->bindlock fs_reclaim irq_context: 0 &u->bindlock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &u->bindlock pool_lock#2 irq_context: 0 &u->bindlock batched_entropy_u32.lock irq_context: 0 &u->bindlock batched_entropy_u32.lock crngs.lock irq_context: 0 &u->bindlock &net->unx.table.locks[i] irq_context: 0 &u->bindlock &net->unx.table.locks[i] &net->unx.table.locks[i]/1 irq_context: 0 &u->bindlock &net->unx.table.locks[i]/1 irq_context: 0 &u->lock &u->lock/1 &sk->sk_peer_lock irq_context: 0 &u->lock &u->lock/1 &dentry->d_lock irq_context: 0 &u->lock &u->lock/1 &sk->sk_peer_lock &sk->sk_peer_lock/1 irq_context: 0 &u->lock &u->lock/1 &sk->sk_peer_lock/1 irq_context: 0 &u->iolock rcu_read_lock &ei->socket.wq.wait irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &ei->socket.wq.wait irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &dentry->d_lock irq_context: 0 &group->mark_mutex &pcp->lock &zone->lock irq_context: 0 &group->mark_mutex &sb->s_type->i_lock_key &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &group->notification_waitq irq_context: 0 &group->notification_lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#5 batched_entropy_u8.lock irq_context: 0 &type->i_mutex_dir_key#5 kfence_freelist_lock irq_context: 0 kn->active#5 fs_reclaim irq_context: 0 kn->active#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#5 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#5 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#5 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex irq_context: 0 &p->lock &of->mutex kn->active#5 param_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rename_lock.seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &dentry->d_lock irq_context: 0 sb_writers#7 tomoyo_ss irq_context: 0 sb_writers#7 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 tomoyo_ss &c->lock irq_context: 0 sb_writers#7 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock 
rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#7 kn->active#5 fs_reclaim irq_context: 0 sb_writers#7 kn->active#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 kn->active#5 &c->lock irq_context: 0 sb_writers#7 kn->active#5 &kernfs_locks->open_file_mutex[count] irq_context: 0 sb_writers#7 kn->active#5 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 sb_writers#7 kn->active#5 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 kn->active#5 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 sb_writers#7 iattr_mutex irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex fs_reclaim irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex &____s->seqcount irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex &c->lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex pool_lock#2 irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#5 param_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#5 param_lock disk_events_mutex irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#5 rcu_node_0 irq_context: softirq &(&tbl->managed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&tbl->managed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#4 pool_lock#2 irq_context: 0 &p->lock &of->mutex kn->active#4 &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &p->lock &of->mutex kn->active#4 &c->lock irq_context: 0 &p->lock &of->mutex kn->active#4 &____s->seqcount irq_context: 0 mapping.invalidate_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &base->lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &rq->__lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex kn->active#4 &device->physical_node_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &p->lock &n->list_lock irq_context: 0 &p->lock &n->list_lock &c->lock irq_context: 0 &p->lock &of->mutex kn->active#4 &pcp->lock &zone->lock irq_context: 0 mapping.invalidate_lock &folio_wait_table[i] &p->pi_lock irq_context: 0 mapping.invalidate_lock &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 
&mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] irq_context: 0 mapping.invalidate_lock &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock &c->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 fs_reclaim irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 &____s->seqcount#6 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount 
&dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 &____s->seqcount#6 &____s->seqcount#6/1 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock/2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock/2 &dentry->d_lock/3 irq_context: 0 &p->lock remove_cache_srcu irq_context: 0 &p->lock remove_cache_srcu quarantine_lock irq_context: 0 &p->lock remove_cache_srcu &c->lock irq_context: 0 &p->lock remove_cache_srcu &n->list_lock irq_context: 0 &p->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &p->lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &p->lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &p->lock &of->mutex kn->active#4 udc_lock irq_context: 0 kn->active#6 fs_reclaim irq_context: 0 kn->active#6 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#6 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#6 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#6 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &of->mutex irq_context: 0 kn->active#7 fs_reclaim irq_context: 0 kn->active#7 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#8 fs_reclaim irq_context: 0 kn->active#8 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#8 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#8 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#8 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#9 fs_reclaim irq_context: 0 kn->active#9 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#9 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#9 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#9 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#9 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#10 fs_reclaim irq_context: 0 kn->active#10 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#10 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#10 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#10 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#11 fs_reclaim irq_context: 0 kn->active#11 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#11 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#11 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#11 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#12 fs_reclaim irq_context: 0 kn->active#12 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#12 &kernfs_locks->open_file_mutex[count] irq_context: 
0 kn->active#12 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#12 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#4 &n->list_lock irq_context: 0 &p->lock &of->mutex kn->active#4 &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &n->list_lock &c->lock irq_context: 0 mapping.invalidate_lock rcu_read_lock &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sem->wait_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#13 fs_reclaim irq_context: 0 kn->active#13 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#13 dev_base_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &n->list_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 kn->active#13 &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 quarantine_lock irq_context: 0 kn->active#4 remove_cache_srcu &rq->__lock irq_context: 0 kn->active#4 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &c->lock irq_context: 0 kn->active#14 fs_reclaim irq_context: 0 kn->active#14 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] fs_reclaim 
irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 &p->lock &of->mutex kn->active#14 dev_base_lock irq_context: 0 kn->active#15 fs_reclaim irq_context: 0 kn->active#15 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#15 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#15 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#15 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#16 fs_reclaim irq_context: 0 kn->active#16 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#16 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#16 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#16 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#16 dev_base_lock irq_context: 0 kn->active#17 fs_reclaim irq_context: 0 kn->active#17 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#17 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#17 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#17 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#17 dev_base_lock irq_context: 0 sb_writers#5 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 kn->active#18 fs_reclaim irq_context: 0 kn->active#18 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#18 &c->lock irq_context: 0 kn->active#18 &n->list_lock irq_context: 0 kn->active#18 &n->list_lock &c->lock irq_context: 0 kn->active#18 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#18 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#18 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#19 fs_reclaim irq_context: 0 kn->active#19 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#19 &c->lock irq_context: 0 kn->active#19 &____s->seqcount irq_context: 0 kn->active#19 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#19 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#19 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#20 fs_reclaim irq_context: 0 kn->active#20 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#21 fs_reclaim irq_context: 0 kn->active#21 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#21 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#21 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#21 &kernfs_locks->open_file_mutex[count] 
fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#22 fs_reclaim irq_context: 0 kn->active#22 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#22 &c->lock irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#22 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mapping.invalidate_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 kn->active#23 fs_reclaim irq_context: 0 kn->active#23 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#23 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#23 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#23 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#24 fs_reclaim irq_context: 0 kn->active#24 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#24 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#24 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#24 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#4 fw_lock irq_context: 0 kn->active#25 fs_reclaim irq_context: 0 kn->active#25 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#25 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#25 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#25 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#25 dev_base_lock irq_context: 0 kn->active#26 fs_reclaim irq_context: 0 kn->active#26 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#26 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#26 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#26 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &of->mutex kn->active#26 &dev->power.lock irq_context: 0 &of->mutex kn->active#26 pci_lock irq_context: 0 kn->active#27 fs_reclaim irq_context: 0 kn->active#27 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#27 &c->lock irq_context: 0 kn->active#27 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#27 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#27 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#28 fs_reclaim irq_context: 0 kn->active#28 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#28 &c->lock irq_context: 0 kn->active#28 &n->list_lock irq_context: 0 kn->active#28 &n->list_lock &c->lock irq_context: 0 kn->active#28 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#28 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#28 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 slock-AF_INET/1 irq_context: 0 rtnl_mutex devnet_rename_sem irq_context: 0 rtnl_mutex devnet_rename_sem (console_sem).lock irq_context: 0 rtnl_mutex devnet_rename_sem 
console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex devnet_rename_sem console_lock console_srcu console_owner irq_context: 0 rtnl_mutex devnet_rename_sem console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex devnet_rename_sem console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex devnet_rename_sem fs_reclaim irq_context: 0 rtnl_mutex devnet_rename_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex devnet_rename_sem pool_lock#2 irq_context: 0 rtnl_mutex devnet_rename_sem &k->list_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem fs_reclaim irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem pool_lock#2 irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem kernfs_rename_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex devnet_rename_sem &c->lock irq_context: 0 rtnl_mutex devnet_rename_sem &n->list_lock irq_context: 0 rtnl_mutex devnet_rename_sem &n->list_lock &c->lock irq_context: 0 rtnl_mutex devnet_rename_sem kernfs_rename_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex fs_reclaim irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex pool_lock#2 irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex nl_table_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex nl_table_wait.lock irq_context: 0 rtnl_mutex devnet_rename_sem &obj_hash[i].lock irq_context: 0 rtnl_mutex &x->wait#3 irq_context: 0 kn->active#4 &rq->__lock irq_context: 0 kn->active#29 fs_reclaim irq_context: 0 kn->active#29 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#29 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#29 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#29 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#30 fs_reclaim irq_context: 0 kn->active#30 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#30 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#30 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#30 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#30 fs_reclaim irq_context: 0 &p->lock &of->mutex kn->active#30 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &p->lock &of->mutex kn->active#30 pool_lock#2 irq_context: 0 &p->lock &of->mutex kn->active#30 &obj_hash[i].lock irq_context: 0 kn->active#20 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#22 remove_cache_srcu irq_context: 0 
kn->active#22 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#22 remove_cache_srcu &c->lock irq_context: 0 kn->active#22 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#22 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#22 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#22 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &evdev->client_lock irq_context: 0 &evdev->mutex irq_context: 0 &evdev->mutex &dev->mutex#2 irq_context: 0 kn->active#23 &c->lock irq_context: 0 kn->active#24 &c->lock irq_context: 0 kn->active#24 &n->list_lock irq_context: 0 kn->active#24 &n->list_lock &c->lock irq_context: 0 kn->active#31 fs_reclaim irq_context: 0 kn->active#31 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#31 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#31 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#31 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#32 fs_reclaim irq_context: 0 kn->active#32 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#32 &c->lock irq_context: 0 kn->active#32 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#32 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#32 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#33 fs_reclaim irq_context: 0 kn->active#33 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#33 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#33 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#33 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rcu_read_lock &____s->seqcount#6 irq_context: 0 &sb->s_type->i_mutex_key#15 irq_context: 0 mapping.invalidate_lock#2 irq_context: 0 mapping.invalidate_lock#2 mmu_notifier_invalidate_range_start irq_context: 0 mapping.invalidate_lock#2 &____s->seqcount irq_context: 0 mapping.invalidate_lock#2 &xa->xa_lock#9 irq_context: 0 mapping.invalidate_lock#2 &xa->xa_lock#9 pool_lock#2 irq_context: 0 mapping.invalidate_lock#2 lock#4 irq_context: 0 mapping.invalidate_lock#2 pool_lock#2 irq_context: 0 mapping.invalidate_lock#2 tk_core.seq.seqcount irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &p->lock &of->mutex kn->active#4 quarantine_lock irq_context: 0 mapping.invalidate_lock#2 &xa->xa_lock#9 &c->lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &n->list_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &n->list_lock &c->lock irq_context: 0 kn->active#34 fs_reclaim irq_context: 0 kn->active#34 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#34 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#34 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#34 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 
&type->i_mutex_dir_key#5/1 batched_entropy_u32.lock crngs.lock irq_context: 0 kn->active#29 &c->lock irq_context: 0 kn->active#30 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 &p->lock &of->mutex kn->active#30 &c->lock irq_context: 0 mapping.invalidate_lock#2 &pcp->lock &zone->lock irq_context: 0 mapping.invalidate_lock#2 lock#4 &lruvec->lru_lock irq_context: 0 mapping.invalidate_lock#2 &c->lock irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim &rq->__lock irq_context: 0 tomoyo_ss remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex sysctl_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex sysctl_lock pool_lock#2 irq_context: 0 rtnl_mutex sysctl_lock krc.lock irq_context: 0 rtnl_mutex &nft_net->commit_mutex irq_context: 0 rtnl_mutex proc_subdir_lock irq_context: 0 rtnl_mutex &ent->pde_unload_lock irq_context: 0 rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 rtnl_mutex proc_subdir_lock irq_context: 0 rtnl_mutex target_list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_INET irq_context: 0 sb_writers#7 tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers#7 kn->active#4 fs_reclaim irq_context: 0 sb_writers#7 kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 kn->active#4 &c->lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock rcu_node_0 irq_context: 0 &evdev->mutex &dev->mutex#2 &obj_hash[i].lock irq_context: 0 &evdev->mutex &dev->mutex#2 &x->wait#3 irq_context: 0 &evdev->mutex &dev->mutex#2 &rq->__lock irq_context: 0 &evdev->mutex &dev->mutex#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &evdev->mutex &dev->mutex#2 rcu_read_lock &rq->__lock irq_context: 0 &evdev->mutex &dev->mutex#2 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#4 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 kn->active#8 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#11 &c->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#7 &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &c->lock irq_context: 0 kn->active#9 &c->lock irq_context: 0 kn->active#12 &c->lock irq_context: 0 kn->active#12 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 
tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &p->lock &of->mutex kn->active#4 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 tomoyo_ss &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#2 &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sem->wait_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &sem->wait_lock irq_context: 0 sb_writers &p->pi_lock irq_context: 0 sb_writers &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers &p->pi_lock &rq->__lock irq_context: 0 sb_writers &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#2 &sem->wait_lock irq_context: 0 tomoyo_ss rcu_read_lock rename_lock irq_context: 0 kn->active#10 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#11 &n->list_lock irq_context: 0 kn->active#11 &n->list_lock &c->lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss quarantine_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 key#9 irq_context: 0 kn->active#8 &c->lock irq_context: 0 kn->active#11 remove_cache_srcu irq_context: 0 kn->active#11 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 kn->active#7 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 &p->lock &rq->__lock irq_context: 0 &p->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx &mm->mmap_lock &rq->__lock irq_context: 0 kn->active#6 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#6 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock irq_context: 0 kn->active#6 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#10 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock irq_context: 0 kn->active#10 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 kn->active#9 &n->list_lock irq_context: 0 kn->active#9 &n->list_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 kn->active#6 &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss quarantine_lock irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock 
pool_lock#2 irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &retval->lock irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &c->lock irq_context: 0 lock#4 &lruvec->lru_lock irq_context: 0 mapping.invalidate_lock#2 &rq->__lock irq_context: 0 kn->active#9 remove_cache_srcu irq_context: 0 kn->active#9 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#9 remove_cache_srcu &c->lock irq_context: 0 kn->active#9 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#9 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#9 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#9 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#11 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#12 &n->list_lock irq_context: 0 kn->active#12 &n->list_lock &c->lock irq_context: 0 kn->active#10 &c->lock irq_context: 0 kn->active#10 &n->list_lock irq_context: 0 kn->active#10 &n->list_lock &c->lock irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &pcp->lock &zone->lock irq_context: 0 mapping.invalidate_lock#2 rcu_read_lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#2 &rq->__lock irq_context: 0 &type->i_mutex_dir_key#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#11 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#11 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_read_lock &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 lock#4 &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 
sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 &____s->seqcount#6 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 &____s->seqcount#6 &____s->seqcount#6/1 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock/2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rename_lock rename_lock.seqcount &dentry->d_lock/2 &dentry->d_lock/3 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_lock_key &xa->xa_lock#9 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 lock#4 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 lock#5 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &lruvec->lru_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &info->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &xa->xa_lock#9 irq_context: 0 kn->active#32 &n->list_lock irq_context: 0 kn->active#32 &n->list_lock &c->lock irq_context: 0 kn->active#33 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 &ep->mtx &pipe->rd_wait irq_context: 0 kn->active#33 &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &n->list_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &n->list_lock &c->lock irq_context: 0 kn->active#35 fs_reclaim irq_context: 0 kn->active#35 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#35 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#35 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#35 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#35 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &n->list_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex &n->list_lock irq_context: 0 &sig->cred_guard_mutex &n->list_lock &c->lock irq_context: 0 sb_writers &rq->__lock irq_context: 0 &mm->mmap_lock &n->list_lock irq_context: 0 &mm->mmap_lock &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu quarantine_lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &c->lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &n->list_lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 
&sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override pool_lock irq_context: hardirq hrtimer_bases.lock fill_pool_map-wait-type-override &n->list_lock irq_context: hardirq hrtimer_bases.lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq rcu_callback &____s->seqcount irq_context: 0 &ep->mtx key#10 irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock irq_context: 0 &vma->vm_lock->lock fs_reclaim &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem &sem->wait_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mapping.invalidate_lock#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &sem->wait_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &sem->wait_lock irq_context: 0 &mm->mmap_lock &p->pi_lock irq_context: 0 &mm->mmap_lock &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_node_0 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 &vma->vm_lock->lock &folio_wait_table[i] irq_context: 0 &vma->vm_lock->lock &folio_wait_table[i] &p->pi_lock irq_context: 0 &vma->vm_lock->lock &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
mapping.invalidate_lock#2 rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &vma->vm_lock->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 videodev_lock irq_context: 0 fh->state->lock irq_context: 0 &vdev->fh_lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->dev_mutex irq_context: 0 &dev->dev_mutex fs_reclaim irq_context: 0 &dev->dev_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->dev_mutex &c->lock irq_context: 0 &dev->dev_mutex pool_lock#2 irq_context: 0 &dev->dev_mutex vim2m:1183:(hdl)->_lock irq_context: 0 &dev->dev_mutex &____s->seqcount irq_context: 0 &dev->dev_mutex &obj_hash[i].lock irq_context: 0 &dev->dev_mutex &vdev->fh_lock irq_context: 0 &mdev->req_queue_mutex irq_context: 0 &mdev->req_queue_mutex &vdev->fh_lock irq_context: 0 &mdev->req_queue_mutex &mdev->graph_mutex irq_context: 0 &mdev->req_queue_mutex vim2m:1183:(hdl)->_lock irq_context: 0 &mdev->req_queue_mutex vim2m:1183:(hdl)->_lock &obj_hash[i].lock irq_context: 0 &mdev->req_queue_mutex vim2m:1183:(hdl)->_lock pool_lock#2 irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex &m2m_dev->job_spinlock irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex &q->done_wq irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex &q->mmap_lock irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex &obj_hash[i].lock irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex pool_lock#2 irq_context: 0 &mdev->req_queue_mutex &obj_hash[i].lock irq_context: 0 &mdev->req_queue_mutex pool_lock#2 irq_context: 0 &pipe->rd_wait &ep->lock irq_context: 0 &pipe->rd_wait &ep->lock &ep->wq irq_context: 0 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &n->list_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem mmu_notifier_invalidate_range_start irq_context: 0 &mm->mmap_lock &anon_vma->rwsem ptlock_ptr(ptdesc)#2 irq_context: 0 &dev_instance->mutex irq_context: 0 &dev_instance->mutex fs_reclaim irq_context: 0 &dev_instance->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev_instance->mutex &c->lock irq_context: 0 &dev_instance->mutex &n->list_lock irq_context: 0 &dev_instance->mutex &n->list_lock &c->lock irq_context: 0 &dev_instance->mutex pool_lock#2 irq_context: 0 &dev_instance->mutex vicodec_core:1851:(hdl)->_lock irq_context: 0 
&dev_instance->mutex &vdev->fh_lock irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex &m2m_dev->job_spinlock irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex &q->done_wq irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex &q->mmap_lock irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex &obj_hash[i].lock irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex pool_lock#2 irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock &obj_hash[i].lock irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock pool_lock#2 irq_context: 0 &mdev->req_queue_mutex &pcp->lock &zone->lock irq_context: 0 &mdev->req_queue_mutex &____s->seqcount irq_context: 0 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev_instance->mutex &____s->seqcount irq_context: 0 &mm->mmap_lock remove_cache_srcu &c->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &n->list_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock &ep->wq irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx rcu_read_lock &pipe->rd_wait irq_context: 0 &ep->mtx &obj_hash[i].lock irq_context: 0 kn->active#36 fs_reclaim irq_context: 0 kn->active#36 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#36 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#36 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#36 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#37 fs_reclaim irq_context: 0 kn->active#37 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#37 &c->lock irq_context: 0 kn->active#37 &____s->seqcount irq_context: 0 kn->active#37 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#37 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#37 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 mapping.invalidate_lock#2 batched_entropy_u8.lock irq_context: 0 mapping.invalidate_lock#2 kfence_freelist_lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock irq_context: 0 &sighand->signalfd_wqh irq_context: 0 kn->active#38 fs_reclaim irq_context: 0 kn->active#38 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#38 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#38 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#38 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#38 &c->lock irq_context: 0 mapping.invalidate_lock#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#2 &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 
tomoyo_ss quarantine_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.gp_wq irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &vma->vm_lock->lock rcu_node_0 irq_context: 0 &mm->mmap_lock quarantine_lock irq_context: 0 kn->active#38 &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock quarantine_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &sem->wait_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &vcapture->lock irq_context: 0 &vcapture->lock &q->done_wq irq_context: 0 &vcapture->lock &q->mmap_lock irq_context: 0 &mdev->graph_mutex irq_context: 0 &dev->mutex#3 irq_context: 0 &mdev->req_queue_mutex &dev->mutex#3 irq_context: 0 &mdev->req_queue_mutex &dev->mutex#3 &vdev->fh_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu quarantine_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock key irq_context: 0 &vma->vm_lock->lock rcu_read_lock pcpu_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock percpu_counters_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock pool_lock#2 irq_context: 0 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock 
&cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &base->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 remove_cache_srcu &rq->__lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mdev->req_queue_mutex &dev->mutex#3 &q->done_wq irq_context: 0 &mdev->req_queue_mutex &dev->mutex#3 &q->mmap_lock irq_context: 0 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex key irq_context: 0 &sig->cred_guard_mutex pcpu_lock irq_context: 0 &sig->cred_guard_mutex percpu_counters_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 kn->active#38 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#38 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 rcu_read_lock key irq_context: 0 rcu_read_lock pcpu_lock irq_context: 0 rcu_read_lock percpu_counters_lock irq_context: 0 &p->lock &rq->__lock cpu_asid_lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &p->pi_lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &p->pi_lock &rq->__lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#38 &pcp->lock &zone->lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback quarantine_lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu quarantine_lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &c->lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &n->list_lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 
&lo->lo_mutex irq_context: 0 &disk->open_mutex &lo->lo_mutex irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &rq->__lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex nbd_index_mutex irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock mmu_notifier_invalidate_range_start irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &rq->__lock irq_context: 0 &disk->open_mutex &nbd->config_lock irq_context: 0 &disk->open_mutex &nbd->config_lock &bdev->bd_size_lock irq_context: 0 &disk->open_mutex &nbd->config_lock &q->queue_lock irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(sdp, lock) irq_context: 0 &disk->open_mutex &nbd->config_lock set->srcu irq_context: 0 &disk->open_mutex &nbd->config_lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &disk->open_mutex &nbd->config_lock &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex &nbd->config_lock &x->wait#2 irq_context: 0 &disk->open_mutex &nbd->config_lock &rq->__lock irq_context: 0 &disk->open_mutex &nbd->config_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ACCESS_PRIVATE(ssp->srcu_sup, lock) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex &ACCESS_PRIVATE(ssp->srcu_sup, lock) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ACCESS_PRIVATE(ssp->srcu_sup, lock) irq_context: 0 &disk->open_mutex &nbd->config_lock set->srcu irq_context: 0 &disk->open_mutex &nbd->config_lock pool_lock#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &rq->__lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#30 sb_writers#3 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 &____s->seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#30 sb_writers#3 lock#4 irq_context: 0 &type->s_umount_key#30 sb_writers#3 pool_lock#2 irq_context: 0 &type->s_umount_key#30 sb_writers#3 &mapping->i_private_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 
tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 lock#4 &lruvec->lru_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 &xa->xa_lock#9 pool_lock#2 irq_context: 0 &type->s_umount_key#30 sb_writers#3 bit_wait_table + i irq_context: 0 &type->s_umount_key#30 sb_writers#3 &rq->__lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 &eli->li_list_mtx irq_context: 0 &type->s_umount_key#30 sb_writers#3 &journal->j_state_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem pool_lock#2 irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &____s->seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &c->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &base->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &base->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &c->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &n->list_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock pool_lock#2 irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &retval->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &nvmeq->sq_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &____s->seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &x->wait#26 irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &__ctx->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &pool->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 
jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &rq->__lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &__ctx->lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &retval->lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &nvmeq->sq_lock irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock &c->lock irq_context: 0 &disk->open_mutex nbd_index_mutex &nbd->config_lock &____s->seqcount irq_context: hardirq &x->wait#26 irq_context: hardirq &x->wait#26 &p->pi_lock irq_context: hardirq &x->wait#26 &p->pi_lock &rq->__lock irq_context: hardirq &x->wait#26 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem (&timer.timer) irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock &q->requeue_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock rcu_read_lock &pool->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) &q->requeue_lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) &hctx->lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &hctx->lock irq_context: 0 (wq_completion)kblockd 
(work_completion)(&(&q->requeue_work)->work) rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &nvmeq->sq_lock irq_context: hardirq &fq->mq_flush_lock irq_context: hardirq &fq->mq_flush_lock tk_core.seq.seqcount irq_context: hardirq &fq->mq_flush_lock &x->wait#26 irq_context: hardirq &fq->mq_flush_lock &x->wait#26 &p->pi_lock irq_context: hardirq &fq->mq_flush_lock &x->wait#26 &p->pi_lock &rq->__lock irq_context: hardirq &fq->mq_flush_lock &x->wait#26 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &journal->j_wait_updates irq_context: 0 &type->s_umount_key#30 sb_writers#3 &obj_hash[i].lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &rq->__lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss batched_entropy_u8.lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss kfence_freelist_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &meta->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 &disk->open_mutex &nbd->config_lock rcu_read_lock &rq->__lock irq_context: 0 &disk->open_mutex &nbd->config_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss rcu_read_lock rename_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &x->wait#2 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex kn->active#4 &rfkill->lock irq_context: 0 &type->i_mutex_dir_key#2 &n->list_lock &c->lock irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle 
&meta_group_info[i]->alloc_sem &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &rq_wait->wait irq_context: 0 &type->i_mutex_dir_key#4 &base->lock irq_context: 0 &type->i_mutex_dir_key#4 &base->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_node_0 irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &c->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem rcu_read_lock rcu_node_0 irq_context: hardirq &rq_wait->wait irq_context: hardirq &rq_wait->wait &p->pi_lock irq_context: hardirq &rq_wait->wait &p->pi_lock &rq->__lock irq_context: hardirq &rq_wait->wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#16 &c->lock irq_context: 0 kn->active#16 &____s->seqcount irq_context: 0 kn->active#16 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#16 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 &disk->open_mutex &new->lock irq_context: 0 kn->active#13 &____s->seqcount irq_context: 0 kn->active#14 &c->lock irq_context: 0 kn->active#14 &pcp->lock &zone->lock irq_context: 0 kn->active#14 &____s->seqcount irq_context: 0 kn->active#13 &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &disk->open_mutex &new->lock &mtdblk->cache_mutex irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &____s->seqcount irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#39 fs_reclaim irq_context: 0 kn->active#39 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#39 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#39 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#39 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &mtd->master.chrdev_lock irq_context: 0 &mtd->master.chrdev_lock &mm->mmap_lock irq_context: 0 &p->lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &type->i_mutex_dir_key#2 rcu_read_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#2 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount 
irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &u->iolock rcu_read_lock rcu_node_0 irq_context: 0 &u->iolock rcu_read_lock &rq->__lock irq_context: 0 &u->iolock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#4 &rq->__lock irq_context: 0 &p->lock batched_entropy_u8.lock irq_context: 0 &p->lock kfence_freelist_lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu quarantine_lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &c->lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &n->list_lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &rq->__lock irq_context: 0 &type->i_mutex_dir_key#2 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#5 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &rq->__lock irq_context: 0 kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock rename_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key/1 remove_cache_srcu &rq->__lock irq_context: 0 kn->active#4 batched_entropy_u8.lock irq_context: 0 kn->active#4 kfence_freelist_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &journal->j_state_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 
&type->s_umount_key#30 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (&journal->j_commit_timer) irq_context: 0 &journal->j_checkpoint_mutex irq_context: 0 &journal->j_checkpoint_mutex mmu_notifier_invalidate_range_start irq_context: 0 &journal->j_checkpoint_mutex pool_lock#2 irq_context: 0 &journal->j_checkpoint_mutex tk_core.seq.seqcount irq_context: 0 &journal->j_checkpoint_mutex rcu_read_lock tk_core.seq.seqcount irq_context: 0 &journal->j_checkpoint_mutex rcu_read_lock &nvmeq->sq_lock irq_context: 0 &journal->j_checkpoint_mutex bit_wait_table + i irq_context: 0 &journal->j_checkpoint_mutex &rq->__lock irq_context: 0 &journal->j_checkpoint_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &journal->j_checkpoint_mutex &journal->j_state_lock irq_context: 0 &journal->j_state_lock &journal->j_wait_updates irq_context: 0 &journal->j_list_lock irq_context: 0 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 &ei->i_es_lock irq_context: 0 &mapping->i_private_lock irq_context: 0 &ret->b_state_lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock irq_context: 0 &ei->i_es_lock key#2 irq_context: 0 &journal->j_state_lock irq_context: 0 &journal->j_state_lock &journal->j_list_lock irq_context: 0 rcu_read_lock &retval->lock irq_context: 0 rcu_read_lock tk_core.seq.seqcount irq_context: 0 rcu_read_lock &nvmeq->sq_lock irq_context: 0 &fq->mq_flush_lock &q->requeue_lock irq_context: 0 &fq->mq_flush_lock &obj_hash[i].lock irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) &__ctx->lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &__ctx->lock irq_context: hardirq &fq->mq_flush_lock bit_wait_table + i irq_context: hardirq &fq->mq_flush_lock bit_wait_table + i &p->pi_lock irq_context: hardirq &fq->mq_flush_lock bit_wait_table + i &p->pi_lock &rq->__lock irq_context: hardirq &fq->mq_flush_lock bit_wait_table + i &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ret->b_state_lock &journal->j_list_lock rcu_read_lock &memcg->move_lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock rcu_read_lock &xa->xa_lock#9 irq_context: 0 &ret->b_state_lock &journal->j_list_lock &sb->s_type->i_lock_key#3 irq_context: 0 &ret->b_state_lock &journal->j_list_lock &wb->list_lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock &wb->list_lock &sb->s_type->i_lock_key#3 irq_context: 0 &ret->b_state_lock &journal->j_list_lock rcu_read_lock &xa->xa_lock#9 key#11 irq_context: 0 &journal->j_state_lock &journal->j_list_lock irq_context: 0 &sbi->s_md_lock irq_context: 0 
&journal->j_fc_wait irq_context: 0 &journal->j_history_lock irq_context: 0 &type->i_mutex_dir_key#5 &p->pi_lock irq_context: softirq &(&wb->dwork)->timer irq_context: softirq &(&wb->dwork)->timer rcu_read_lock &pool->lock irq_context: softirq &(&wb->dwork)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&wb->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&wb->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&wb->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &p->sequence irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) key#11 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->list_lock &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->list_lock &type->s_umount_key#30 &sb->s_type->i_lock_key#22 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sb->s_type->i_lock_key#22 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &wb->list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->list_lock &type->s_umount_key#40 &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &memcg->move_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &s->s_inode_wblist_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 tk_core.seq.seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 lock#4 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 lock#4 &lruvec->lru_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 lock#4 &obj_hash[i].lock irq_context: 0 
(wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 lock#5 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock key#11 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 key#11 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &wb->list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &wb->list_lock &sb->s_type->i_lock_key#3 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &retval->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &nvmeq->sq_lock irq_context: hardirq rcu_read_lock &memcg->move_lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 irq_context: hardirq rcu_read_lock &xa->xa_lock#9 &obj_hash[i].lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 &base->lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 &base->lock &obj_hash[i].lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 key#11 irq_context: hardirq rcu_read_lock &xa->xa_lock#9 key#12 irq_context: hardirq rcu_read_lock &xa->xa_lock#9 &wb->work_lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 &wb->work_lock &obj_hash[i].lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 &wb->work_lock &base->lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 &wb->work_lock &base->lock &obj_hash[i].lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 &s->s_inode_wblist_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 batched_entropy_u8.lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 &journal->j_state_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#30 sb_writers#3 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 &journal->j_state_lock &base->lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &ret->b_state_lock irq_context: 0 &type->s_umount_key#30 sb_writers#3 jbd2_handle &meta_group_info[i]->alloc_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 tomoyo_ss remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 &p->lock rcu_read_lock rcu_node_0 irq_context: 0 &p->lock rcu_read_lock &rq->__lock irq_context: 0 &eli->li_list_mtx &obj_hash[i].lock irq_context: 0 &eli->li_list_mtx pool_lock#2 irq_context: 0 ext4_li_mtx irq_context: 0 ext4_li_mtx &eli->li_list_mtx irq_context: 0 ext4_li_mtx &obj_hash[i].lock irq_context: 0 ext4_li_mtx pool_lock#2 irq_context: 0 sb_writers 
&sb->s_type->i_mutex_key#4 &rq->__lock irq_context: softirq &(&wb->bw_dwork)->timer irq_context: softirq &(&wb->bw_dwork)->timer rcu_read_lock &pool->lock irq_context: softirq &(&wb->bw_dwork)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&wb->bw_dwork)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&wb->bw_dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&wb->bw_dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->bw_dwork)->work) irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->bw_dwork)->work) &wb->list_lock irq_context: 0 &p->lock &mm->mmap_lock &rq->__lock irq_context: 0 &p->lock fs_reclaim &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 fs_reclaim &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 rename_lock irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 &____s->seqcount#6 irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 &____s->seqcount#6 &____s->seqcount#6/1 irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock &dentry->d_lock/2 &dentry->d_lock/3 &wq irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock/2 irq_context: 0 &type->i_mutex_dir_key#4 rename_lock rename_lock.seqcount &dentry->d_lock/2 &dentry->d_lock/3 irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock pool_lock#2 irq_context: 0 kn->active#17 &c->lock irq_context: 0 kn->active#17 &n->list_lock irq_context: 0 kn->active#17 &n->list_lock &c->lock irq_context: 0 &dentry->d_lock &lru->node[i].lock irq_context: 0 kn->active#40 fs_reclaim irq_context: 0 kn->active#40 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#40 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#40 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#40 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu fs_reclaim irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 
&fsnotify_mark_srcu pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu &group->notification_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu &group->notification_waitq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu &group->notification_waitq &p->pi_lock irq_context: 0 sb_writers#5 &s->s_inode_list_lock irq_context: 0 sb_writers#5 &info->lock irq_context: 0 sb_writers#5 &obj_hash[i].lock irq_context: 0 sb_writers#5 pool_lock#2 irq_context: 0 sb_writers#5 &sbinfo->stat_lock irq_context: 0 sb_writers#5 &xa->xa_lock#9 irq_context: 0 sb_writers#5 &fsnotify_mark_srcu irq_context: 0 sb_writers#5 &obj_hash[i].lock pool_lock irq_context: 0 &mark->lock irq_context: 0 &group->inotify_data.idr_lock irq_context: 0 &group->inotify_data.idr_lock &obj_hash[i].lock irq_context: 0 &group->inotify_data.idr_lock pool_lock#2 irq_context: 0 &conn->lock irq_context: 0 destroy_lock irq_context: 0 fs/notify/mark.c:89 irq_context: 0 (reaper_work).work irq_context: 0 (wq_completion)events_unbound connector_reaper_work irq_context: 0 (reaper_work).work &rq->__lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work destroy_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(sdp, lock) irq_context: 0 (wq_completion)events_unbound connector_reaper_work &fsnotify_mark_srcu irq_context: 0 (wq_completion)events_unbound (reaper_work).work irq_context: 0 (wq_completion)events_unbound connector_reaper_work &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work destroy_lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(sdp, lock) irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &fsnotify_mark_srcu irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (reaper_work).work &x->wait#2 irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rq->__lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rq->__lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &x->wait#10 irq_context: 0 (wq_completion)events_unbound connector_reaper_work &x->wait#2 irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) 
&ssp->srcu_sup->srcu_cb_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex &rq->__lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work pool_lock#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex &base->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&barr->work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&barr->work) &x->wait#10 irq_context: 0 (wq_completion)events_unbound (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tty->ldisc_sem &tty->atomic_write_lock &tty->termios_rwsem &ldata->output_lock &port_lock_key &dev->power.lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &wb->work_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &wb->work_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &wb->work_lock &base->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &pcp->lock &zone->lock irq_context: softirq (&sdp->delay_work) irq_context: softirq (&sdp->delay_work) rcu_read_lock &pool->lock irq_context: softirq (&sdp->delay_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&sdp->delay_work) rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&sdp->delay_work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&sdp->delay_work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &p->alloc_lock &x->wait#25 &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &p->alloc_lock &x->wait#25 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 &c->lock irq_context: 0 &mm->mmap_lock batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock kfence_freelist_lock irq_context: 0 tomoyo_ss tomoyo_policy_lock &n->list_lock irq_context: 0 tomoyo_ss tomoyo_policy_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &meta->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem kfence_freelist_lock irq_context: 0 rtnl_mutex rlock-AF_NETLINK irq_context: 0 rtnl_mutex (inetaddr_validator_chain).rwsem irq_context: 
0 rtnl_mutex (inetaddr_chain).rwsem irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex pcpu_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fib_info_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &dir->lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem nl_table_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem nl_table_wait.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rhashtable_bucket irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex batched_entropy_u32.lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex rhashtable_bucket irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex rhashtable_bucket rhashtable_bucket/1 irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 nl_table_lock irq_context: 0 &sb->s_type->i_mutex_key#10 
nl_table_wait.lock irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_NETLINK irq_context: 0 &sb->s_type->i_mutex_key#10 &nlk->wait irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem hwsim_radio_lock irq_context: 0 rtnl_mutex _xmit_LOOPBACK irq_context: 0 rtnl_mutex netpoll_srcu irq_context: 0 rtnl_mutex &in_dev->mc_tomb_lock irq_context: 0 rtnl_mutex &im->lock irq_context: 0 rtnl_mutex fib_info_lock irq_context: 0 rtnl_mutex rcu_read_lock &dir->lock#2 irq_context: 0 rtnl_mutex cbs_list_lock irq_context: 0 rtnl_mutex &ndev->lock irq_context: 0 rtnl_mutex &idev->mc_lock irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem irq_context: 0 rtnl_mutex rcu_read_lock &net->ipv6.addrconf_hash_lock irq_context: 0 rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock pool_lock#2 irq_context: 0 rtnl_mutex &ifa->lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 rtnl_mutex &tb->tb6_lock irq_context: 0 rtnl_mutex &tb->tb6_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &tb->tb6_lock &____s->seqcount irq_context: 0 rtnl_mutex &tb->tb6_lock &c->lock irq_context: 0 rtnl_mutex &tb->tb6_lock pool_lock#2 irq_context: 0 rtnl_mutex &tb->tb6_lock nl_table_lock irq_context: 0 rtnl_mutex &tb->tb6_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &tb->tb6_lock nl_table_wait.lock irq_context: 0 rtnl_mutex &ndev->lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock irq_context: 0 &mm->mmap_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 batched_entropy_u8.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 kfence_freelist_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu quarantine_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &c->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &n->list_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_callback &dir->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &meta->lock irq_context: softirq rcu_callback &dir->lock#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mapping.invalidate_lock &xa->xa_lock#9 &n->list_lock irq_context: 0 mapping.invalidate_lock &xa->xa_lock#9 &n->list_lock &c->lock irq_context: 0 mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 irq_context: 0 &sb->s_type->i_mutex_key#10 
slock-AF_INET6 irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_INET6 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &xa->xa_lock#9 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 lock#4 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &nvmeq->sq_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 bit_wait_table + i irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 inode_hash_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 inode_hash_lock &s->s_inode_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->xattr_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &dentry->d_lock &lru->node[i].lock irq_context: 0 sb_writers#5 tomoyo_ss irq_context: 0 sb_writers#5 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers#5 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#5 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#5 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#5 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 
tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#5 &xattrs->lock irq_context: 0 userns_state_mutex irq_context: 0 vmap_purge_lock free_vmap_area_lock &meta->lock irq_context: 0 vmap_purge_lock free_vmap_area_lock kfence_freelist_lock irq_context: 0 mapping.invalidate_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &mapping->i_mmap_rwsem irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &sb->s_type->i_lock_key &xa->xa_lock#9 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 lock#5 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &lruvec->lru_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &obj_hash[i].lock irq_context: 0 &f->f_pos_lock sb_writers#5 irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 &mm->mmap_lock irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 fs_reclaim irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 &____s->seqcount irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 &xa->xa_lock#9 irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 &sb->s_type->i_lock_key irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 &info->lock irq_context: 0 &f->f_pos_lock sb_writers#5 &sb->s_type->i_mutex_key#13 lock#4 irq_context: 0 &sb->s_type->i_lock_key#4 irq_context: 0 &sb->s_type->i_lock_key#4 &dentry->d_lock irq_context: 0 sk_lock-AF_UNIX irq_context: 0 sk_lock-AF_UNIX slock-AF_UNIX irq_context: 0 slock-AF_UNIX irq_context: 0 cgroup_threadgroup_rwsem &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem key irq_context: 0 cgroup_threadgroup_rwsem pcpu_lock irq_context: 0 cgroup_threadgroup_rwsem percpu_counters_lock irq_context: 0 cgroup_threadgroup_rwsem pool_lock#2 irq_context: 0 cgroup_threadgroup_rwsem &cfs_rq->removed.lock irq_context: 0 vmap_purge_lock &rq->__lock irq_context: 0 vmap_purge_lock &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_INET irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 irq_context: 0 slock-AF_INET#2 irq_context: 0 sk_lock-AF_INET6 irq_context: 0 sk_lock-AF_INET6 slock-AF_INET6 irq_context: 0 slock-AF_INET6 irq_context: 0 sk_lock-AF_INET &table->hash[i].lock irq_context: 0 sk_lock-AF_INET &table->hash[i].lock clock-AF_INET irq_context: 0 sk_lock-AF_INET &table->hash[i].lock &table->hash2[i].lock irq_context: 0 sk_lock-AF_INET6 &table->hash[i].lock irq_context: 0 sk_lock-AF_INET6 &table->hash[i].lock clock-AF_INET6 irq_context: 0 sk_lock-AF_INET6 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 vmap_purge_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 quarantine_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss &c->lock irq_context: 0 
sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 vmap_purge_lock vmap_purge_lock.wait_lock irq_context: 0 sk_lock-AF_NETLINK &mm->mmap_lock irq_context: 0 sk_lock-AF_NETLINK fs_reclaim irq_context: 0 sk_lock-AF_NETLINK fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_NETLINK pool_lock#2 irq_context: 0 sk_lock-AF_NETLINK free_vmap_area_lock irq_context: 0 sk_lock-AF_NETLINK free_vmap_area_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_NETLINK free_vmap_area_lock pool_lock#2 irq_context: 0 sk_lock-AF_NETLINK vmap_area_lock irq_context: 0 sk_lock-AF_NETLINK &____s->seqcount irq_context: 0 sk_lock-AF_NETLINK pcpu_alloc_mutex irq_context: 0 sk_lock-AF_NETLINK pcpu_alloc_mutex pcpu_lock irq_context: 0 sk_lock-AF_NETLINK &obj_hash[i].lock irq_context: 0 sk_lock-AF_NETLINK batched_entropy_u32.lock irq_context: 0 sk_lock-AF_NETLINK vmap_purge_lock irq_context: 0 sk_lock-AF_NETLINK vmap_purge_lock purge_vmap_area_lock irq_context: 0 sk_lock-AF_NETLINK &fp->aux->used_maps_mutex irq_context: 0 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#41 fs_reclaim irq_context: 0 kn->active#41 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#41 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#41 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#41 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &n->list_lock irq_context: 0 rtnl_mutex &n->list_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &ndev->lock &ifa->lock irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &wq#2 irq_context: 0 vlan_ioctl_mutex &mm->mmap_lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex irq_context: 0 cb_lock irq_context: 0 cb_lock genl_mutex irq_context: 0 cb_lock genl_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex &c->lock irq_context: 0 cb_lock genl_mutex &n->list_lock irq_context: 0 cb_lock genl_mutex &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rlock-AF_NETLINK irq_context: 0 cb_lock genl_mutex &obj_hash[i].lock irq_context: 0 cb_lock fs_reclaim irq_context: 0 cb_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock pool_lock#2 irq_context: 0 cb_lock rlock-AF_NETLINK irq_context: 0 cb_lock rtnl_mutex irq_context: 0 cb_lock &obj_hash[i].lock irq_context: 0 cb_lock &c->lock irq_context: 0 cb_lock &n->list_lock irq_context: 0 cb_lock &n->list_lock &c->lock irq_context: 0 dev_addr_sem 
irq_context: 0 sb_writers#4 tomoyo_ss quarantine_lock irq_context: 0 sb_writers#4 remove_cache_srcu irq_context: 0 sb_writers#4 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#4 remove_cache_srcu &c->lock irq_context: 0 sb_writers#4 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#4 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#4 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock &____s->seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx irq_context: 0 cb_lock &rdev->wiphy.mtx irq_context: 0 cb_lock &rdev->wiphy.mtx fs_reclaim irq_context: 0 cb_lock &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock &rdev->wiphy.mtx pool_lock#2 irq_context: 0 cb_lock &rdev->wiphy.mtx &c->lock irq_context: 0 cb_lock &rdev->wiphy.mtx &____s->seqcount irq_context: 0 cb_lock &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 cb_lock &rdev->wiphy.mtx rlock-AF_NETLINK irq_context: 0 cb_lock nlk_cb_mutex-GENERIC irq_context: 0 cb_lock nlk_cb_mutex-GENERIC fs_reclaim irq_context: 0 cb_lock nlk_cb_mutex-GENERIC fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock nlk_cb_mutex-GENERIC pool_lock#2 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &____s->seqcount irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rtnl_mutex irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rtnl_mutex &rdev->wiphy.mtx irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rdev->wiphy.mtx irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rdev->wiphy.mtx pool_lock#2 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &rdev->wiphy.mtx &rdev->bss_lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC rlock-AF_NETLINK irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &n->list_lock &c->lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &c->lock irq_context: 0 cb_lock genl_mutex &____s->seqcount irq_context: 0 cb_lock quarantine_lock irq_context: 0 cb_lock remove_cache_srcu irq_context: 0 cb_lock remove_cache_srcu quarantine_lock irq_context: 0 cb_lock remove_cache_srcu &c->lock irq_context: 0 cb_lock remove_cache_srcu &n->list_lock irq_context: 0 cb_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 cb_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &u->iolock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &u->iolock &mm->mmap_lock fs_reclaim irq_context: 0 &u->iolock &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &u->iolock &mm->mmap_lock &____s->seqcount irq_context: 0 &u->iolock &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 &u->iolock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &u->iolock &meta->lock irq_context: 0 &u->iolock kfence_freelist_lock irq_context: 0 sb_writers#4 &c->lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &n->list_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &obj_hash[i].lock pool_lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &p->alloc_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock key#6 irq_context: 0 &f->f_pos_lock 
&type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 lock#4 &lruvec->lru_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &pcp->lock &zone->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock lock#4 &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &n->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 sb_writers#5 fs_reclaim irq_context: 0 sb_writers#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &sb->s_type->i_lock_key &xa->xa_lock#9 irq_context: 0 sb_writers#5 lock#4 irq_context: 0 sb_writers#5 lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#5 lock#5 irq_context: 0 sb_writers#5 &lruvec->lru_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock quarantine_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &mm->mmap_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 vmap_area_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &dentry->d_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 sb_writers#5 mount_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 sb_writers#5 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 sb_writers#5 &sb->s_type->i_lock_key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 sb_writers#5 &wb->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 sb_writers#5 &wb->list_lock &sb->s_type->i_lock_key irq_context: 0 &pipe->mutex/1 &pcp->lock &zone->lock irq_context: softirq (&net->sctp.addr_wq_timer) irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 kfence_freelist_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock batched_entropy_u64.lock crngs.lock base_crng.lock irq_context: softirq &(&ovs_net->masks_rebalance)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&ovs_net->masks_rebalance)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &pcp->lock &zone->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &____s->seqcount irq_context: 0 lock pidmap_lock &n->list_lock irq_context: 0 lock pidmap_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key irq_context: 0 rtnl_mutex &dev_addr_list_lock_key pool_lock#2 irq_context: 0 rtnl_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 rtnl_mutex uevent_sock_mutex &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &pnettable->lock irq_context: 0 rtnl_mutex smc_ib_devices.mutex irq_context: 0 rtnl_mutex napi_hash_lock irq_context: 0 rtnl_mutex lapb_list_lock irq_context: 0 rtnl_mutex x25_neigh_list_lock irq_context: 0 rtnl_mutex console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex console_lock console_srcu console_owner irq_context: 0 rtnl_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)events (work_completion)(&aux->work) irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_area_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) purge_vmap_area_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock purge_vmap_area_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock free_vmap_area_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock free_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock free_vmap_area_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) pcpu_lock irq_context: 0 rtnl_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &u->lock &ei->socket.wq.wait irq_context: 0 rtnl_mutex _xmit_ETHER irq_context: 0 rtnl_mutex &tb->tb6_lock rlock-AF_NETLINK irq_context: 0 rtnl_mutex _xmit_SLIP irq_context: 0 &root->kernfs_rwsem &sem->wait_lock irq_context: 0 rtnl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 rtnl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: softirq (&eql->timer) irq_context: softirq (&eql->timer) &eql->queue.lock irq_context: softirq (&eql->timer) &obj_hash[i].lock irq_context: softirq (&eql->timer) &base->lock irq_context: softirq (&eql->timer) &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 jbd2_handle &rq->__lock irq_context: 0 rtnl_mutex free_vmap_area_lock irq_context: 0 rtnl_mutex vmap_area_lock irq_context: 0 rtnl_mutex init_mm.page_table_lock irq_context: 0 rtnl_mutex &cma->lock irq_context: 0 rtnl_mutex cma_mutex irq_context: 0 rtnl_mutex cma_mutex &zone->lock irq_context: 0 rtnl_mutex 
cma_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cma_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex cma_mutex lock#2 irq_context: 0 rtnl_mutex cma_mutex lock#2 &obj_hash[i].lock irq_context: 0 rtnl_mutex cma_mutex lock#2 rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex cma_mutex lock#2 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex cma_mutex lock#2 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex cma_mutex lock#2 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex cma_mutex lock#2 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex cma_mutex lock#2 &rq->__lock irq_context: 0 rtnl_mutex cma_mutex lock#2 (work_completion)(work) irq_context: 0 rtnl_mutex cma_mutex &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex cpu_hotplug_lock xps_map_mutex irq_context: 0 rtnl_mutex cpu_hotplug_lock xps_map_mutex fs_reclaim irq_context: 0 rtnl_mutex cpu_hotplug_lock xps_map_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex cpu_hotplug_lock xps_map_mutex pool_lock#2 irq_context: 0 rtnl_mutex cpu_hotplug_lock xps_map_mutex jump_label_mutex irq_context: 0 rtnl_mutex cpu_hotplug_lock xps_map_mutex jump_label_mutex patch_lock irq_context: 0 rtnl_mutex cma_mutex rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex &zone->lock irq_context: 0 rtnl_mutex rcu_node_0 irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) (console_sem).lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) console_lock console_srcu console_owner irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) lweventlist_lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)gve 
(work_completion)(&priv->service_task) lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) &rq->__lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) &obj_hash[i].lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) &base->lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) pool_lock#2 irq_context: 0 (wq_completion)gve (work_completion)(&priv->service_task) &dir->lock#2 irq_context: 0 &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 rtnl_mutex remove_cache_srcu irq_context: 0 rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex cbs_list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tn->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fs_reclaim irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock rlock-AF_NETLINK irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &tb->tb6_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock 
irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rlock-AF_NETLINK irq_context: 0 rtnl_mutex &rfkill->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &rq->__lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lweventlist_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lweventlist_lock pool_lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &dir->lock#2 irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &base->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &data->mutex irq_context: 0 rtnl_mutex _xmit_ETHER &local->filter_lock irq_context: 0 rtnl_mutex _xmit_ETHER &local->filter_lock pool_lock#2 irq_context: 0 rtnl_mutex _xmit_ETHER &rdev->wiphy_work_lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &local->filter_lock irq_context: 0 rtnl_mutex &sem->wait_lock irq_context: 0 &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &type->i_mutex_dir_key#4 &sem->wait_lock irq_context: 0 &type->i_mutex_dir_key#4 &p->pi_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &root->kernfs_rwsem &p->pi_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex _xmit_VOID irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_node_0 irq_context: 0 
sk_lock-AF_NETLINK rcu_read_lock &rq->__lock irq_context: 0 &u->iolock &u->lock irq_context: 0 rtnl_mutex _xmit_X25 irq_context: 0 rtnl_mutex lapb_list_lock irq_context: 0 rtnl_mutex lapb_list_lock pool_lock#2 irq_context: 0 rtnl_mutex lapb_list_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex lapb_list_lock &base->lock irq_context: 0 rtnl_mutex lapb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &lapbeth->up_lock irq_context: 0 rtnl_mutex &lapb->lock irq_context: 0 rtnl_mutex &lapb->lock &c->lock irq_context: 0 rtnl_mutex &lapb->lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &lapb->lock &____s->seqcount irq_context: 0 rtnl_mutex &lapb->lock pool_lock#2 irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh pool_lock#2 irq_context: 0 rtnl_mutex &lapb->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &lapb->lock &base->lock irq_context: 0 rtnl_mutex &lapb->lock &base->lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->xattr_sem &rq->__lock irq_context: 0 &type->i_mutex_dir_key#3 &ei->xattr_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss fs_reclaim &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss batched_entropy_u8.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss batched_entropy_u8.lock crngs.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss kfence_freelist_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &meta->lock irq_context: 0 &pipe->rd_wait &p->pi_lock &rq->__lock irq_context: 0 &pipe->rd_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 mapping.invalidate_lock &obj_hash[i].lock irq_context: 0 mapping.invalidate_lock key irq_context: 0 mapping.invalidate_lock pcpu_lock irq_context: 0 mapping.invalidate_lock percpu_counters_lock irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 rtnl_mutex &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->xattr_sem irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 tk_core.seq.seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &journal->j_state_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &wb->list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 
sb_writers#3 &sb->s_type->i_mutex_key#8 &mm->mmap_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 fs_reclaim irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &____s->seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 lock#4 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &mapping->i_private_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &(ei->i_block_reservation_lock) irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &memcg->move_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &xa->xa_lock#9 irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &obj_hash[i].lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock_bh &base->lock irq_context: 0 rtnl_mutex rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex nl_table_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex nl_table_wait.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock nl_table_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rlock-AF_NETLINK irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock nl_table_wait.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock batched_entropy_u32.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &tb->tb6_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: hardirq bit_wait_table + i &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rq->__lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rq->__lock &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock &n->list_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock &n->list_lock &c->lock irq_context: 0 
&pipe->rd_wait &p->pi_lock &cfs_rq->removed.lock irq_context: softirq rcu_callback rcu_read_lock rt6_exception_lock irq_context: hardirq &folio_wait_table[i] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET6 batched_entropy_u32.lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &____s->seqcount irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &c->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &dir->lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &ul->lock irq_context: 0 sk_lock-AF_INET6 &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 pool_lock#2 irq_context: 0 sk_lock-AF_INET6 batched_entropy_u16.lock irq_context: 0 &sb->s_type->i_mutex_key#10 &table->hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#3 &n->list_lock irq_context: 0 &type->i_mutex_dir_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rename_lock.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &dentry->d_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &s->s_inode_list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tk_core.seq.seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &p->alloc_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq#2 irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock clock-AF_INET irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock clock-AF_INET irq_context: 0 sk_lock-AF_INET &h->lhash2[i].lock irq_context: 0 sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock irq_context: 0 sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock clock-AF_INET6 irq_context: 0 sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: 0 sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock clock-AF_INET6 irq_context: 0 sk_lock-AF_INET6 &h->lhash2[i].lock irq_context: 0 &u->iolock &dir->lock irq_context: 
softirq rcu_callback &ul->lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &pcp->lock &zone->lock irq_context: 0 &tty->legacy_mutex &tty->ldisc_sem irq_context: 0 &tty->legacy_mutex &f->f_lock irq_context: 0 &tty->legacy_mutex &f->f_lock fasync_lock irq_context: 0 &tty->legacy_mutex &obj_hash[i].lock irq_context: 0 &tty->legacy_mutex pool_lock#2 irq_context: 0 &tty->legacy_mutex tasklist_lock irq_context: 0 &tty->legacy_mutex tasklist_lock &sighand->siglock irq_context: 0 &tty->legacy_mutex tasklist_lock &sighand->siglock &tty->ctrl.lock irq_context: 0 rcu_read_lock &tty->ctrl.lock irq_context: 0 &tty->ctrl.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &dentry->d_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &dentry->d_lock &wq#2 irq_context: 0 &port_lock_key irq_context: 0 &buf->lock irq_context: 0 &tty->ldisc_sem rcu_read_lock &tty->ctrl.lock irq_context: 0 &tty->ldisc_sem &port_lock_key irq_context: 0 &tty->ldisc_sem &port->lock irq_context: 0 &tty->ldisc_sem &tty->termios_rwsem &tty->ldisc_sem &tty->flow.lock irq_context: 0 tomoyo_ss tomoyo_policy_lock &rq->__lock irq_context: 0 &net->packet.sklist_lock irq_context: 0 sk_lock-AF_PACKET irq_context: 0 sk_lock-AF_PACKET slock-AF_PACKET irq_context: softirq &tx->clean_lock irq_context: softirq &tx->clean_lock &obj_hash[i].lock irq_context: softirq &tx->clean_lock pool_lock#2 irq_context: 0 sk_lock-AF_PACKET &po->bind_lock irq_context: 0 sk_lock-AF_PACKET &po->bind_lock ptype_lock irq_context: 0 sk_lock-AF_PACKET &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET &x->wait#3 irq_context: 0 sk_lock-AF_PACKET &rq->__lock irq_context: 0 sk_lock-AF_PACKET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tty->ldisc_sem &ldata->atomic_read_lock irq_context: 0 &tty->ldisc_sem &ldata->atomic_read_lock &tty->termios_rwsem irq_context: 0 &tty->ldisc_sem &ldata->atomic_read_lock &tty->termios_rwsem &tty->read_wait irq_context: 0 &tty->ldisc_sem &ldata->atomic_read_lock (work_completion)(&buf->work) irq_context: 0 &tty->ldisc_sem &ldata->atomic_read_lock &rq->__lock irq_context: 0 &tty->ldisc_sem &ldata->atomic_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&idev->mc_dad_work)->timer irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) irq_context: 0 
(wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock &dir->lock#2 irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock ptype_lock irq_context: 0 slock-AF_PACKET irq_context: 0 sk_lock-AF_PACKET &mm->mmap_lock irq_context: 0 sk_lock-AF_PACKET fs_reclaim irq_context: 0 sk_lock-AF_PACKET fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_PACKET &c->lock irq_context: 0 sk_lock-AF_PACKET pool_lock#2 irq_context: 0 sk_lock-AF_PACKET free_vmap_area_lock irq_context: 0 sk_lock-AF_PACKET free_vmap_area_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET vmap_area_lock irq_context: 0 sk_lock-AF_PACKET &____s->seqcount irq_context: 0 sk_lock-AF_PACKET pcpu_alloc_mutex irq_context: 0 sk_lock-AF_PACKET pcpu_alloc_mutex pcpu_lock irq_context: 0 sk_lock-AF_PACKET batched_entropy_u32.lock irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock purge_vmap_area_lock irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock free_vmap_area_lock irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock free_vmap_area_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock free_vmap_area_lock pool_lock#2 irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock irq_context: 0 sk_lock-AF_PACKET vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET &fp->aux->used_maps_mutex irq_context: 0 rlock-AF_PACKET irq_context: 0 wlock-AF_PACKET irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 rtnl_mutex lapb_list_lock &c->lock irq_context: 0 rtnl_mutex lapb_list_lock &n->list_lock irq_context: 0 rtnl_mutex lapb_list_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &c->lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock fs_reclaim irq_context: 0 &pipe->mutex/1 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &____s->seqcount irq_context: 0 &pipe->mutex/1 &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &meta->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock kfence_freelist_lock irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) 
rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock crngs.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock &base->lock &obj_hash[i].lock irq_context: 0 cb_lock &rdev->wiphy.mtx &pcp->lock &zone->lock irq_context: 0 cb_lock &rdev->wiphy.mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock &rdev->wiphy.mtx quarantine_lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &pcp->lock &zone->lock irq_context: 0 rtnl_mutex class irq_context: 0 rtnl_mutex (&tbl->proxy_timer) irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &rdev->wiphy_work_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock &obj_hash[i].lock irq_context: softirq &(&idev->mc_ifc_work)->timer irq_context: softirq &(&idev->mc_ifc_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&idev->mc_ifc_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&idev->mc_ifc_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&idev->mc_ifc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&idev->mc_ifc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_ifc_work)->work) irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fs_reclaim irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &ul->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &n->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock batched_entropy_u32.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &base->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: softirq rcu_callback &ul->lock#2 irq_context: softirq rcu_callback &x->wait#3 &p->pi_lock 
&cfs_rq->removed.lock irq_context: 0 rtnl_mutex quarantine_lock irq_context: 0 sb_writers#3 &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock kfence_freelist_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq &tx->clean_lock &meta->lock irq_context: softirq &tx->clean_lock kfence_freelist_lock irq_context: softirq net/core/link_watch.c:31 irq_context: softirq net/core/link_watch.c:31 rcu_read_lock &pool->lock irq_context: softirq net/core/link_watch.c:31 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq net/core/link_watch.c:31 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq net/core/link_watch.c:31 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq net/core/link_watch.c:31 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &dev->tx_global_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &dev->tx_global_lock _xmit_ETHER#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &dev->tx_global_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock noop_qdisc.q.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &sch->q.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex class irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex (&tbl->proxy_timer) irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &n->lock irq_context: 0 
(wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &n->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rlock-AF_NETLINK irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rcu_read_lock lock#8 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock rcu_read_lock id_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock krc.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: hardirq &dev->power.lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 &pipe->mutex/1 &pcp->lock &zone->lock &____s->seqcount irq_context: softirq &(&idev->mc_dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &net->ipv6.addrconf_hash_lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock krc.lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &rdev->wiphy_work_lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &idev->mc_lock pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock &base->lock irq_context: 0 rtnl_mutex &idev->mc_lock &base->lock 
&obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock &dir->lock irq_context: 0 rtnl_mutex rcu_read_lock krc.lock irq_context: 0 rtnl_mutex &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 rtnl_mutex &tb->tb6_lock rt6_exception_lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 &dir->lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->packet.sklist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &po->bind_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &po->bind_lock ptype_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &po->bind_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 &po->bind_lock &dir->lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PACKET irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PACKET slock-AF_PACKET irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_PACKET irq_context: 0 &sb->s_type->i_mutex_key#10 fanout_mutex irq_context: 0 &sb->s_type->i_mutex_key#10 &x->wait#3 irq_context: 0 &sb->s_type->i_mutex_key#10 &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_PACKET irq_context: 0 &sb->s_type->i_mutex_key#10 pcpu_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 sk_lock-AF_INET6 fs_reclaim irq_context: 0 sk_lock-AF_INET6 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_INET6 &mm->mmap_lock irq_context: 0 sk_lock-AF_INET6 once_lock irq_context: 0 sk_lock-AF_INET6 once_lock crngs.lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &pool->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &tbl->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &n->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &____s->seqcount#8 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 
sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: softirq (&dom->period_timer) irq_context: softirq (&dom->period_timer) key#13 irq_context: softirq (&dom->period_timer) &p->sequence irq_context: softirq (&dom->period_timer) &obj_hash[i].lock irq_context: softirq (&dom->period_timer) &base->lock irq_context: softirq (&dom->period_timer) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock kfence_freelist_lock irq_context: softirq &tx->clean_lock rcu_read_lock &ei->socket.wq.wait irq_context: softirq rcu_read_lock rlock-AF_PACKET irq_context: softirq rcu_read_lock rcu_read_lock &ei->socket.wq.wait irq_context: softirq rcu_read_lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &cfs_rq->removed.lock irq_context: softirq rcu_read_lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock once_lock irq_context: softirq rcu_read_lock rcu_read_lock once_lock crngs.lock irq_context: softirq rcu_read_lock rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq rcu_read_lock rcu_read_lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock &pcp->lock &zone->lock irq_context: softirq rcu_read_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &____s->seqcount irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 
rcu_read_lock pool_lock#2 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock krc.lock irq_context: 0 &mm->mmap_lock quarantine_lock irq_context: 0 &vma->vm_lock->lock &lruvec->lru_lock irq_context: 0 &vma->vm_lock->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock kfence_freelist_lock irq_context: softirq &(&ifa->dad_work)->timer irq_context: softirq &(&ifa->dad_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&ifa->dad_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&ifa->dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&ifa->dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&ifa->dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ul->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &n->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock 
rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock &rq->__lock &obj_hash[i].lock irq_context: 0 rcu_read_lock &rq->__lock &base->lock irq_context: 0 rcu_read_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: softirq &c->lock batched_entropy_u8.lock irq_context: softirq &c->lock kfence_freelist_lock irq_context: softirq rcu_read_lock rcu_read_lock &ct->lock irq_context: softirq rcu_read_lock once_lock irq_context: softirq rcu_read_lock once_lock crngs.lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq drivers/base/dd.c:321 irq_context: softirq drivers/base/dd.c:321 rcu_read_lock &pool->lock irq_context: softirq drivers/base/dd.c:321 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq drivers/base/dd.c:321 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq drivers/base/dd.c:321 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq drivers/base/dd.c:321 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work device_links_lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work device_links_lock &k->list_lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work device_links_lock &k->k_lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work deferred_probe_mutex irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work rcu_read_lock &pool->lock &p->pi_lock 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work &rq->__lock irq_context: 0 (wq_completion)events (deferred_probe_timeout_work).work deferred_probe_work irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &ul->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock &sighand->siglock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock batched_entropy_u8.lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock kfence_freelist_lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock 
&sighand->signalfd_wqh irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock pool_lock#2 irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &sighand->siglock &meta->lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sighand->siglock kfence_freelist_lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ep->mtx rcu_read_lock &sighand->signalfd_wqh irq_context: 0 &ep->mtx rcu_read_lock &ei->socket.wq.wait irq_context: 0 rcu_read_lock rcu_read_lock key irq_context: 0 rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &pl->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &pl->lock key#12 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock &base->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock fs_reclaim irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock kfence_freelist_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &ul->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq (&journal->j_commit_timer) &p->pi_lock irq_context: 0 rcu_read_lock &base->lock irq_context: 0 rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock key#14 irq_context: softirq (&lapb->t1timer) irq_context: softirq (&lapb->t1timer) &lapb->lock irq_context: softirq (&lapb->t1timer) &lapb->lock batched_entropy_u8.lock irq_context: softirq (&lapb->t1timer) &lapb->lock kfence_freelist_lock irq_context: softirq (&lapb->t1timer) &lapb->lock pool_lock#2 irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh pool_lock#2 irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh &meta->lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh kfence_freelist_lock irq_context: softirq (&lapb->t1timer) &lapb->lock &obj_hash[i].lock irq_context: softirq (&lapb->t1timer) &lapb->lock &base->lock irq_context: softirq (&lapb->t1timer) &lapb->lock &base->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock &____s->seqcount#9 irq_context: 0 sk_lock-AF_INET6 &c->lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh rcu_read_lock 
pool_lock#2 irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh &____s->seqcount irq_context: softirq (&dev->watchdog_timer) irq_context: softirq (&dev->watchdog_timer) &dev->tx_global_lock irq_context: softirq (&dev->watchdog_timer) &dev->tx_global_lock &obj_hash[i].lock irq_context: softirq (&dev->watchdog_timer) &dev->tx_global_lock &base->lock irq_context: softirq (&dev->watchdog_timer) &dev->tx_global_lock &base->lock &obj_hash[i].lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &meta->lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 kfence_freelist_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: softirq (&lapb->t1timer) &lapb->lock &c->lock irq_context: softirq (&lapb->t1timer) &lapb->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle batched_entropy_u32.lock crngs.lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rlock-AF_NETLINK irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &dir->lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex mapping.invalidate_lock &ei->i_data_sem &ei->i_es_lock &c->lock 
irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock pool_lock irq_context: 0 tasklist_lock &sighand->siglock batched_entropy_u8.lock irq_context: 0 tasklist_lock &sighand->siglock kfence_freelist_lock irq_context: 0 sb_writers#5 &dentry->d_lock irq_context: 0 sb_writers#5 tomoyo_ss &c->lock irq_context: 0 sb_writers#5 tomoyo_ss &____s->seqcount irq_context: softirq &(&tbl->managed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 hostname_poll.wait.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_PACKET rcu_read_lock &rq->__lock irq_context: 0 sk_lock-AF_PACKET rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_PACKET &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_PACKET key irq_context: 0 sk_lock-AF_PACKET pcpu_lock irq_context: 0 sk_lock-AF_PACKET percpu_counters_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &rq->__lock irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock batched_entropy_u8.lock crngs.lock irq_context: 0 rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem batched_entropy_u8.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem kfence_freelist_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem lock#4 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem lock#4 &lruvec->lru_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem lock#5 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &meta->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &pcp->lock &zone->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &journal->j_state_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle irq_context: 0 (wq_completion)writeback 
(work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle lock#4 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle lock#5 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_es_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &ei->i_prealloc_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex lock#4 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &mapping->i_private_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &ret->b_state_lock 
irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &ret->b_state_lock &journal->j_list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &journal->j_revoke_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &pa->pa_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex &lg->lg_prealloc_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &(ei->i_block_reservation_lock) irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &memcg->move_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &xa->xa_lock#9 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &xa->xa_lock#9 &s->s_inode_wblist_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle lock#4 &lruvec->lru_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_raw_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle 
&journal->j_wait_updates irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem tk_core.seq.seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &base->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &wb->work_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &wb->work_lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &wb->work_lock &base->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &lg->lg_mutex rcu_read_lock &pa->pa_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: hardirq &ei->i_completed_io_lock irq_context: hardirq &ei->i_completed_io_lock rcu_read_lock &pool->lock irq_context: hardirq &ei->i_completed_io_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: hardirq &ei->i_completed_io_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: hardirq &ei->i_completed_io_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: hardirq &ei->i_completed_io_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: hardirq &ei->i_completed_io_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &ei->i_completed_io_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &journal->j_state_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &journal->j_state_lock &journal->j_wait_reserved irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_es_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_raw_lock 
irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_raw_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &journal->j_wait_updates irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) pool_lock#2 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &ext4__ioend_wq[i] irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &ret->b_uptodate_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &memcg->move_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &wb->work_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &s->s_inode_wblist_lock irq_context: softirq rcu_read_lock rcu_read_lock batched_entropy_u8.lock irq_context: softirq rcu_read_lock rcu_read_lock kfence_freelist_lock irq_context: softirq rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: softirq rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: softirq rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: softirq rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock once_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock once_lock crngs.lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock tk_core.seq.seqcount 
irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock batched_entropy_u32.lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock batched_entropy_u32.lock crngs.lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &base->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &hashinfo->ehash_locks[i] irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &ct->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &tbl->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->lock &base->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->lock &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &n->lock irq_context: softirq rcu_read_lock &n->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &n->lock &base->lock irq_context: softirq rcu_read_lock &n->lock &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &n->lock &(&n->ha_lock)->lock irq_context: softirq rcu_read_lock &n->lock &(&n->ha_lock)->lock &____s->seqcount#8 irq_context: softirq rcu_read_lock rcu_read_lock &n->lock irq_context: softirq rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock lock#8 irq_context: softirq rcu_read_lock rcu_read_lock id_table_lock irq_context: softirq 
rcu_read_lock &n->lock irq_context: softirq rcu_read_lock &n->lock &____s->seqcount#8 irq_context: softirq rcu_read_lock nl_table_lock irq_context: softirq rcu_read_lock rlock-AF_NETLINK irq_context: softirq rcu_read_lock rcu_read_lock &dir->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 batched_entropy_u16.lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &tcp_hashinfo.bhash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &hashinfo->ehash_locks[i] irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 (&req->rsk_timer) irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &base->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 &icsk->icsk_accept_queue.rskq_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 rcu_read_lock &ei->socket.wq.wait irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock &ct->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET &icsk->icsk_accept_queue.rskq_lock irq_context: 0 sk_lock-AF_INET clock-AF_INET irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &n->list_lock &c->lock irq_context: 0 
sk_lock-AF_INET &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET &base->lock irq_context: 0 sk_lock-AF_INET &base->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET fs_reclaim irq_context: 0 sk_lock-AF_INET fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_INET &____s->seqcount irq_context: 0 sk_lock-AF_INET &c->lock irq_context: 0 sk_lock-AF_INET pool_lock#2 irq_context: 0 sk_lock-AF_INET &mm->mmap_lock irq_context: 0 sk_lock-AF_INET tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &ct->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET &sd->defer_lock irq_context: softirq &sd->defer_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &c->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &base->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &base->lock &obj_hash[i].lock irq_context: 0 &u->iolock rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 &u->iolock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: softirq (&icsk->icsk_delack_timer) irq_context: softirq (&icsk->icsk_retransmit_timer) irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 tk_core.seq.seqcount irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 pool_lock#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &ct->lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock 
rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &n->list_lock &c->lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_state_lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &journal->j_list_lock &obj_hash[i].lock irq_context: 0 &journal->j_list_lock pool_lock#2 irq_context: 0 sb_writers#3 jbd2_handle bit_wait_table + i irq_context: 0 sb_writers#3 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xa->xa_lock#9 pool_lock#2 irq_context: hardirq &fq->mq_flush_lock bit_wait_table + i &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) &journal->j_state_lock irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle pool_lock#2 irq_context: 0 &u->iolock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle &ret->b_state_lock irq_context: 0 &u->iolock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle key#3 irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle key#4 irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle &sbi->s_error_lock irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) jbd2_handle &journal->j_wait_updates irq_context: 0 (wq_completion)events (work_completion)(&sbi->s_sb_upd_work) &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 
softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &pcp->lock &zone->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 sk_lock-AF_INET &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 sk_lock-AF_INET &mm->mmap_lock fs_reclaim irq_context: 0 sk_lock-AF_INET &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_INET &mm->mmap_lock &____s->seqcount irq_context: 0 sk_lock-AF_INET &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_INET &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &type->i_mutex_dir_key#3 &ei->i_data_sem &ei->i_es_lock key#6 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock &ct->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &base->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET clock-AF_INET irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock tcp_metrics_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock tcp_metrics_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock tcp_metrics_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 
&hashinfo->ehash_locks[i] irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &tcp_hashinfo.bhash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: softirq (&lapb->t1timer) &lapb->lock &n->list_lock irq_context: softirq (&lapb->t1timer) &lapb->lock &n->list_lock &c->lock irq_context: softirq rcu_callback uidhash_lock irq_context: softirq rcu_callback percpu_counters_lock irq_context: softirq rcu_callback ucounts_lock irq_context: 0 sk_lock-AF_INET &pcp->lock &zone->lock irq_context: softirq &(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 &c->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock batched_entropy_u8.lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock kfence_freelist_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &meta->lock irq_context: 0 &pipe->wr_wait irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 pool_lock#2 irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock &ct->lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 
key irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 &pipe->mutex/1 &obj_hash[i].lock irq_context: softirq &tx->clean_lock quarantine_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &c->lock irq_context: 0 sk_lock-AF_INET &n->list_lock irq_context: 0 sk_lock-AF_INET &n->list_lock &c->lock irq_context: 0 &pipe->wr_wait &p->pi_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_es_lock key#5 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &xa->xa_lock#9 key#11 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_es_lock key#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &(ei->i_block_reservation_lock) key#15 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sk_lock-AF_INET batched_entropy_u8.lock irq_context: 0 sk_lock-AF_INET kfence_freelist_lock irq_context: 0 sk_lock-AF_INET &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &pipe->wr_wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &rq->__lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &____s->seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &rq->__lock irq_context: 0 sk_lock-AF_INET6 &____s->seqcount irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock pool_lock#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &ul->lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock lock#4 &obj_hash[i].lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu irq_context: 0 &vma->vm_lock->lock remove_cache_srcu quarantine_lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &c->lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &n->list_lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &pipe->mutex/1 
&cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &ul->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock &ul->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 jbd2_handle &mapping->i_private_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &c->lock irq_context: 0 &sig->cred_guard_mutex quarantine_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 tomoyo_ss &cfs_rq->removed.lock irq_context: 0 &pipe->wr_wait &p->pi_lock &rq->__lock irq_context: 0 &pipe->wr_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock irq_context: 0 &vma->vm_lock->lock batched_entropy_u8.lock irq_context: 0 &vma->vm_lock->lock kfence_freelist_lock irq_context: 0 &mm->mmap_lock &meta->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 batched_entropy_u8.lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 kfence_freelist_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_node_0 irq_context: 0 &pipe->mutex/1 rcu_read_lock rcu_node_0 irq_context: 0 &pipe->mutex/1 rcu_read_lock &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 &n->list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rq->__lock irq_context: 0 sk_lock-AF_INET &rq->__lock irq_context: softirq rcu_read_lock &stopper->lock irq_context: softirq rcu_read_lock &stop_pi_lock irq_context: softirq rcu_read_lock &stop_pi_lock &rq->__lock irq_context: softirq rcu_read_lock &stop_pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &rq->__lock rcu_read_lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &rq->__lock rcu_read_lock &cfs_rq->removed.lock irq_context: softirq slock-AF_INET#2 tk_core.seq.seqcount irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock &ct->lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq 
slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq slock-AF_INET#2 &obj_hash[i].lock irq_context: softirq slock-AF_INET#2 &base->lock irq_context: softirq slock-AF_INET#2 &base->lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sk_lock-AF_INET &mm->mmap_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &pcp->lock &zone->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &____s->seqcount irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 tk_core.seq.seqcount irq_context: 0 &pipe->wr_wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem quarantine_lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu irq_context: 0 sk_lock-AF_INET remove_cache_srcu quarantine_lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &n->list_lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &c->lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: hardirq allocation_wait.lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 quarantine_lock irq_context: 0 sb_writers#6 &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_node_0 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &pcp->lock &zone->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &pcp->lock 
&zone->lock &____s->seqcount irq_context: 0 sb_writers#3 tomoyo_ss irq_context: 0 sb_writers#3 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 tomoyo_ss &c->lock irq_context: 0 sb_writers#3 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#3 tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers#3 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#3 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#3 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#3 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#3 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &mapping->i_mmap_rwsem irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &journal->j_state_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &sbi->s_orphan_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET6 rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 
sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &c->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 tasklist_lock &sighand->siglock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &journal->j_list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_callback &x->wait#3 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_PACKET &po->bind_lock rcu_read_lock &c->lock irq_context: 0 &rq->__lock &obj_hash[i].lock irq_context: 0 &rq->__lock &base->lock irq_context: 0 &rq->__lock &base->lock &obj_hash[i].lock irq_context: softirq (&journal->j_commit_timer) &p->pi_lock &rq->__lock irq_context: softirq (&journal->j_commit_timer) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 lock#5 irq_context: softirq (&lapb->t1timer) &lapb->lock batched_entropy_u8.lock crngs.lock irq_context: softirq (&lapb->t1timer) &lapb->lock batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: softirq (&wq_watchdog_timer) &obj_hash[i].lock irq_context: softirq (&wq_watchdog_timer) &base->lock irq_context: softirq (&wq_watchdog_timer) &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock batched_entropy_u8.lock crngs.lock irq_context: softirq rcu_read_lock rcu_read_lock batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET#2 batched_entropy_u16.lock crngs.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock &base->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 batched_entropy_u8.lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 kfence_freelist_lock irq_context: 0 kn->active#42 fs_reclaim irq_context: 0 kn->active#42 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#42 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#42 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#42 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rcu_read_lock &sighand->siglock pool_lock#2 irq_context: 0 rcu_read_lock &sighand->siglock &p->pi_lock irq_context: 0 &futex_queues[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &____s->seqcount irq_context: 0 rcu_read_lock &sighand->siglock batched_entropy_u8.lock irq_context: 0 rcu_read_lock &sighand->siglock kfence_freelist_lock irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &sem->wait_lock irq_context: 
0 rcu_read_lock &sighand->siglock &c->lock irq_context: 0 rcu_read_lock &sighand->siglock &____s->seqcount irq_context: 0 &ep->mtx &ep->lock &ep->wq irq_context: 0 &ep->mtx &ep->lock &ep->wq &p->pi_lock irq_context: 0 &ep->mtx &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &ep->mtx &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx &lock->wait_lock irq_context: 0 &ep->mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &p->lock irq_context: 0 &f->f_pos_lock &p->lock fs_reclaim irq_context: 0 &f->f_pos_lock &p->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &p->lock &c->lock irq_context: 0 &f->f_pos_lock &p->lock &mm->mmap_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 clock-AF_INET6 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &tcp_hashinfo.bhash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 &tcp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 &ep->mtx sysctl_lock irq_context: 0 &f->f_pos_lock sysctl_lock irq_context: 0 &f->f_pos_lock fs_reclaim irq_context: 0 &f->f_pos_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &zone->lock irq_context: 0 &f->f_pos_lock &____s->seqcount irq_context: 0 &f->f_pos_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &zone->lock &____s->seqcount irq_context: 0 sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rq->__lock irq_context: 0 kn->active#5 &c->lock irq_context: 0 &ep->mtx kn->active#5 fs_reclaim irq_context: 0 &ep->mtx kn->active#5 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ep->mtx kn->active#5 pool_lock#2 irq_context: 0 &ep->mtx kn->active#5 &on->poll irq_context: 0 &f->f_pos_lock &p->lock &of->mutex irq_context: 0 &f->f_pos_lock &p->lock &of->mutex kn->active#5 param_lock irq_context: 0 &ep->mtx rcu_read_lock &on->poll irq_context: 0 &f->f_pos_lock &p->lock &mm->mmap_lock fs_reclaim irq_context: 0 &f->f_pos_lock &p->lock &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &p->lock &mm->mmap_lock &____s->seqcount irq_context: 0 &f->f_pos_lock &p->lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &f->f_pos_lock &p->lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 kn->active#5 &n->list_lock irq_context: 0 kn->active#5 &n->list_lock &c->lock irq_context: 0 kn->active#5 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 &f->f_pos_lock &p->lock &n->list_lock irq_context: 0 &f->f_pos_lock &p->lock &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock 
&p->lock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 &f->f_pos_lock &p->lock pool_lock#2 irq_context: 0 &f->f_pos_lock &p->lock module_mutex irq_context: 0 sk_lock-AF_INET once_mutex irq_context: 0 sk_lock-AF_INET once_mutex crngs.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &pool->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &____s->seqcount irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &c->lock irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &____s->seqcount irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &c->lock irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock batched_entropy_u8.lock irq_context: 0 sk_lock-AF_INET &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &hashinfo->ehash_locks[i] irq_context: 0 sk_lock-AF_INET batched_entropy_u32.lock irq_context: 0 sk_lock-AF_INET batched_entropy_u16.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &meta->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 kfence_freelist_lock irq_context: softirq &(&wb->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: softirq &(&dm_bufio_cleanup_old_work)->timer irq_context: softirq &(&dm_bufio_cleanup_old_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&dm_bufio_cleanup_old_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&dm_bufio_cleanup_old_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&dm_bufio_cleanup_old_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&dm_bufio_cleanup_old_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)dm_bufio_cache irq_context: 0 (wq_completion)dm_bufio_cache 
(work_completion)(&(&dm_bufio_cleanup_old_work)->work) irq_context: 0 (wq_completion)dm_bufio_cache (work_completion)(&(&dm_bufio_cleanup_old_work)->work) dm_bufio_clients_lock irq_context: 0 (wq_completion)dm_bufio_cache (work_completion)(&(&dm_bufio_cleanup_old_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)dm_bufio_cache (work_completion)(&(&dm_bufio_cleanup_old_work)->work) &base->lock irq_context: 0 (wq_completion)dm_bufio_cache (work_completion)(&(&dm_bufio_cleanup_old_work)->work) &base->lock &obj_hash[i].lock irq_context: softirq drivers/regulator/core.c:6335 irq_context: softirq drivers/regulator/core.c:6335 rcu_read_lock &pool->lock irq_context: softirq drivers/regulator/core.c:6335 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq drivers/regulator/core.c:6335 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq drivers/regulator/core.c:6335 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq drivers/regulator/core.c:6335 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (regulator_init_complete_work).work irq_context: 0 (wq_completion)events (regulator_init_complete_work).work &k->list_lock irq_context: 0 (wq_completion)events (regulator_init_complete_work).work &k->k_lock irq_context: softirq &(&tbl->gc_work)->timer irq_context: softirq &(&tbl->gc_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&tbl->gc_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&tbl->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: softirq &(&tbl->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&tbl->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &base->lock &obj_hash[i].lock irq_context: 0 &ep->mtx &pipe->wr_wait irq_context: 0 rcu_read_lock tasklist_lock irq_context: 0 &ep->mtx rcu_read_lock &pipe->wr_wait irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 sysctl_lock irq_context: 0 sb_writers#4 &p->pi_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &pcp->lock &zone->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
tomoyo_ss &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &s->s_inode_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->xattr_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_revoke_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle inode_hash_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle batched_entropy_u32.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &xa->xa_lock#9 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &xa->xa_lock#9 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem lock#4 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &journal->j_revoke_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 
sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &wb->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem lock#4 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle lock#4 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock 
&obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_wait_updates irq_context: 0 &type->s_umount_key#41/1 irq_context: 0 &type->s_umount_key#41/1 fs_reclaim irq_context: 0 &type->s_umount_key#41/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#41/1 pool_lock#2 irq_context: 0 &type->s_umount_key#41/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#41/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#41/1 shrinker_mutex irq_context: 0 &type->s_umount_key#41/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#41/1 sb_lock irq_context: 0 &type->s_umount_key#41/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem inode_hash_lock irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem fs_reclaim irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem pool_lock#2 irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#30 irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_rwsem &sb->s_type->i_lock_key#30 irq_context: 0 &type->s_umount_key#41/1 &c->lock irq_context: 0 &type->s_umount_key#41/1 &n->list_lock irq_context: 0 &type->s_umount_key#41/1 &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#41/1 &sb->s_type->i_lock_key#30 irq_context: 0 &type->s_umount_key#41/1 &sb->s_type->i_lock_key#30 &dentry->d_lock irq_context: 0 &type->s_umount_key#41/1 crngs.lock irq_context: 0 &type->s_umount_key#41/1 &root->kernfs_supers_rwsem irq_context: 0 &type->s_umount_key#41/1 &dentry->d_lock irq_context: 0 sb_writers#8 irq_context: 0 sb_writers#8 mount_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 tk_core.seq.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 
&root->kernfs_iattr_rwsem iattr_mutex irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_iattr_rwsem iattr_mutex fs_reclaim irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_iattr_rwsem iattr_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_iattr_rwsem iattr_mutex &____s->seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_iattr_rwsem iattr_mutex &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_iattr_rwsem iattr_mutex pool_lock#2 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_iattr_rwsem iattr_mutex tk_core.seq.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 rename_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 fs_reclaim irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &dentry->d_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem inode_hash_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem fs_reclaim irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem &____s->seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#30 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem &sb->s_type->i_lock_key#30 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &sb->s_type->i_lock_key#30 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &sb->s_type->i_lock_key#30 &dentry->d_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &sb->s_type->i_lock_key#30 &dentry->d_lock &wq#2 irq_context: 0 kn->active#43 fs_reclaim irq_context: 0 kn->active#43 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#43 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#43 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#43 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#43 pool_lock#2 irq_context: 0 sb_writers#8 fs_reclaim irq_context: 0 sb_writers#8 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &mm->mmap_lock irq_context: 0 sb_writers#8 &of->mutex irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex css_set_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock css_set_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#8 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
tomoyo_ss &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &____s->seqcount irq_context: 0 cgroup_mutex cpu_hotplug_lock css_set_lock irq_context: 0 cgroup_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 cgroup_mutex css_set_lock cgroup_file_kn_lock irq_context: 0 &type->s_umount_key#42/1 irq_context: 0 &type->s_umount_key#42/1 fs_reclaim irq_context: 0 &type->s_umount_key#42/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#42/1 pool_lock#2 irq_context: 0 &type->s_umount_key#42/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#42/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#42/1 shrinker_mutex irq_context: 0 &type->s_umount_key#42/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#42/1 sb_lock irq_context: 0 &type->s_umount_key#42/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem inode_hash_lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem fs_reclaim irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem pool_lock#2 irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#31 irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem &sb->s_type->i_lock_key#31 irq_context: 0 &type->s_umount_key#42/1 &sb->s_type->i_lock_key#31 irq_context: 0 &type->s_umount_key#42/1 &sb->s_type->i_lock_key#31 &dentry->d_lock irq_context: 0 &type->s_umount_key#42/1 crngs.lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_supers_rwsem irq_context: 0 &type->s_umount_key#42/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#43 irq_context: 0 &type->s_umount_key#43 &x->wait#23 irq_context: 0 &type->s_umount_key#43 shrinker_mutex irq_context: 0 &type->s_umount_key#43 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#43 percpu_ref_switch_lock irq_context: 0 &type->s_umount_key#43 percpu_ref_switch_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#43 percpu_ref_switch_lock pool_lock#2 irq_context: 0 &type->s_umount_key#43 &root->kernfs_supers_rwsem irq_context: 0 &type->s_umount_key#43 rename_lock.seqcount irq_context: 0 &type->s_umount_key#43 &dentry->d_lock irq_context: 0 &type->s_umount_key#43 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#43 &sb->s_type->i_lock_key#31 irq_context: 0 &type->s_umount_key#43 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#43 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#43 inode_hash_lock irq_context: 0 &type->s_umount_key#43 inode_hash_lock &sb->s_type->i_lock_key#31 irq_context: 0 &type->s_umount_key#43 pool_lock#2 irq_context: 0 &type->s_umount_key#43 &fsnotify_mark_srcu irq_context: 0 &type->s_umount_key#43 &dentry->d_lock/1 irq_context: 0 cgroup_mutex &n->list_lock irq_context: 0 cgroup_mutex &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#42/1 &c->lock 
irq_context: 0 rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem tk_core.seq.seqcount irq_context: 0 cgroup_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 cgroup_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 cgroup_mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: hardirq log_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cgroup_mutex &x->wait#3 irq_context: 0 cgroup_mutex &rq->__lock irq_context: 0 cgroup_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback percpu_ref_switch_waitq.lock irq_context: softirq rcu_callback rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_callback rcu_read_lock rcu_read_lock &pool->lock irq_context: softirq rcu_callback rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_callback rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq rcu_callback rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_callback rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_callback rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex cgroup_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 cgroup_mutex.wait_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem &c->lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem &n->list_lock irq_context: 0 &type->i_mutex_dir_key#3 namespace_sem &n->list_lock &c->lock irq_context: 0 rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) cgroup_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) 
percpu_ref_switch_lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&cgrp->bpf.release_work) rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[2] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)cgroup_destroy irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) cgroup_mutex irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) cgroup_mutex cgroup_rstat_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) cgroup_mutex cgroup_rstat_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) cgroup_mutex css_set_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&css->destroy_work) pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) percpu_ref_switch_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &cgrp->pidlist_mutex irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) (wq_completion)cgroup_pidlist_destroy irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &wq->mutex irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &wq->mutex &pool->lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &wq->mutex &x->wait#10 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) (work_completion)(&cgrp->release_agent_work) irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex css_set_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex cpu_hotplug_lock css_set_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &root->kernfs_rwsem 
&root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex css_set_lock &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex css_set_lock pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_rstat_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_rstat_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) pcpu_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &root->kernfs_rwsem irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) kernfs_idr_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) krc.lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &x->wait#3 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &rq->__lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_mutex cgroup_mutex.wait_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex fs_reclaim irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) 
cgroup_mutex pool_lock#2 irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &c->lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex lock kernfs_idr_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem tk_core.seq.seqcount irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex.wait_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &p->pi_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) krc.lock &base->lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) krc.lock &base->lock &obj_hash[i].lock irq_context: 0 cgroup_mutex lock kernfs_idr_lock &c->lock irq_context: 0 sb_writers#9 irq_context: 0 sb_writers#9 mount_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tomoyo_ss tomoyo_policy_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 tk_core.seq.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_iattr_rwsem iattr_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_iattr_rwsem iattr_mutex fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_iattr_rwsem iattr_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_iattr_rwsem iattr_mutex tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 cgroup_mutex cpuset_mutex irq_context: 0 cgroup_mutex cpuset_mutex callback_lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem &c->lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#42/1 &root->kernfs_rwsem &____s->seqcount irq_context: 0 cgroup_mutex &dom->lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex cpuset_mutex irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex 
cpuset_mutex callback_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &dom->lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &n->list_lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) cgroup_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)cgroup_destroy (work_completion)(&(&css->destroy_rwork)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 rename_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &dentry->d_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem inode_hash_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#31 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &sb->s_type->i_lock_key#31 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &sb->s_type->i_lock_key#31 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &sb->s_type->i_lock_key#31 &dentry->d_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &sb->s_type->i_lock_key#31 &dentry->d_lock &wq#2 irq_context: 0 kn->active#44 fs_reclaim irq_context: 0 kn->active#44 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#44 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#44 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#44 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 fs_reclaim irq_context: 0 sb_writers#9 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &mm->mmap_lock irq_context: 0 sb_writers#9 &of->mutex irq_context: 0 sb_writers#9 &obj_hash[i].lock irq_context: 0 kn->active#45 fs_reclaim irq_context: 0 kn->active#45 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#45 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#45 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#45 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &of->mutex kn->active#45 cpu_hotplug_lock irq_context: 0 sb_writers#9 &of->mutex kn->active#45 cpu_hotplug_lock cpuset_mutex irq_context: 0 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#10 irq_context: 0 sb_writers#10 &mm->mmap_lock 
irq_context: 0 sb_writers#10 &attr->mutex irq_context: 0 sb_writers#10 &attr->mutex &mm->mmap_lock irq_context: 0 &type->s_umount_key#44 irq_context: 0 &type->s_umount_key#44 sb_lock irq_context: 0 &type->s_umount_key#44 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#16 irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem rename_lock irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem rename_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem mount_lock irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem mount_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem mount_lock rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem mount_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#16 namespace_sem mount_lock pool_lock#2 irq_context: 0 &sb->s_type->i_lock_key#26 irq_context: 0 sb_writers#11 irq_context: 0 sb_writers#11 fs_reclaim irq_context: 0 sb_writers#11 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#11 pool_lock#2 irq_context: 0 sb_writers#11 &mm->mmap_lock irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 rename_lock.seqcount irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 fs_reclaim irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &c->lock irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 pool_lock#2 irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &dentry->d_lock irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &dentry->d_lock &wq irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &sb->s_type->i_lock_key#26 irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &s->s_inode_list_lock irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 tk_core.seq.seqcount irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 &sb->s_type->i_lock_key#26 &dentry->d_lock irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 init_binfmt_misc.entries_lock irq_context: 0 sb_writers#11 &sb->s_type->i_mutex_key#16 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &wpan_dev->association_lock irq_context: 0 rtnl_mutex dev_addr_sem irq_context: 0 rtnl_mutex dev_addr_sem net_rwsem irq_context: 0 rtnl_mutex dev_addr_sem net_rwsem &list->lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &tn->lock irq_context: 0 rtnl_mutex dev_addr_sem &sdata->sec_mtx irq_context: 0 rtnl_mutex dev_addr_sem &sdata->sec_mtx &sec->lock irq_context: 0 rtnl_mutex dev_addr_sem fs_reclaim irq_context: 0 rtnl_mutex dev_addr_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex dev_addr_sem 
&c->lock irq_context: 0 rtnl_mutex dev_addr_sem pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem nl_table_lock irq_context: 0 rtnl_mutex dev_addr_sem rlock-AF_NETLINK irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &ei->socket.wq.wait irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &ei->socket.wq.wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem nl_table_wait.lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock irq_context: 0 rtnl_mutex dev_addr_sem &pn->hash_lock irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock rcu_read_lock &tb->tb6_lock irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem input_pool.lock irq_context: 0 rtnl_mutex _xmit_IEEE802154 irq_context: 0 rtnl_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &xa->xa_lock#16 irq_context: 0 &sb->s_type->i_mutex_key#10 genl_sk_destructing_waitq.lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rdev->beacon_registrations_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rdev->mgmt_registrations_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &wdev->pmsr_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem reg_indoor_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 fs_reclaim irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &dentry->d_lock &wq#2 irq_context: 0 
sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &____s->seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem key#15 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &wb->list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &sbi->s_md_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem lock#4 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &sbi->s_md_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem key#3 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &k->list_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &k->k_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex 
&genl_data->genl_data_mutex irq_context: 0 (wq_completion)events (work_completion)(&w->w) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) pool_lock#2 irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock irq_context: 0 &wb->list_lock irq_context: 0 &sbi->s_writepages_rwsem irq_context: 0 &sbi->s_writepages_rwsem &xa->xa_lock#9 irq_context: 0 &sbi->s_writepages_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &sbi->s_writepages_rwsem pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem lock#4 irq_context: 0 &sbi->s_writepages_rwsem lock#4 &lruvec->lru_lock irq_context: 0 &sbi->s_writepages_rwsem lock#5 irq_context: 0 &sbi->s_writepages_rwsem &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem &c->lock irq_context: 0 &sbi->s_writepages_rwsem &pcp->lock &zone->lock irq_context: 0 &sbi->s_writepages_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sbi->s_writepages_rwsem &____s->seqcount irq_context: 0 &sbi->s_writepages_rwsem &journal->j_state_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle lock#4 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle lock#5 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_es_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &memcg->move_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &xa->xa_lock#9 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &xa->xa_lock#9 &s->s_inode_wblist_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &journal->j_wait_updates irq_context: 0 &sbi->s_writepages_rwsem tk_core.seq.seqcount irq_context: 0 &sbi->s_writepages_rwsem &base->lock irq_context: 0 &sbi->s_writepages_rwsem &base->lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &nvmeq->sq_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &base->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &wb->work_lock &obj_hash[i].lock irq_context: 0 
(wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &wb->work_lock &base->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &folio_wait_table[i] irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &folio_wait_table[i] &p->pi_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &folio_wait_table[i] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 jbd2_handle irq_context: 0 &journal->j_wait_commit irq_context: 0 &journal->j_wait_done_commit &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#8 irq_context: 0 &sb->s_type->i_mutex_key#8 fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#8 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 free_vmap_area_lock irq_context: 0 &sb->s_type->i_mutex_key#8 free_vmap_area_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 free_vmap_area_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 vmap_area_lock irq_context: 0 &sb->s_type->i_mutex_key#8 &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#8 init_mm.page_table_lock irq_context: 0 &sb->s_type->i_mutex_key#8 pcpu_alloc_mutex irq_context: 0 &sb->s_type->i_mutex_key#8 pcpu_alloc_mutex pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#8 batched_entropy_u32.lock irq_context: 0 &sb->s_type->i_mutex_key#8 swap_cgroup_mutex irq_context: 0 &sb->s_type->i_mutex_key#8 swap_cgroup_mutex fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#8 swap_cgroup_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 swap_cgroup_mutex &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#8 &journal->j_state_lock irq_context: 0 &sb->s_type->i_mutex_key#8 tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock &q->requeue_lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock rcu_read_lock &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#8 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#8 &x->wait#26 irq_context: 0 &sb->s_type->i_mutex_key#8 &obj_hash[i].lock 
irq_context: 0 &sb->s_type->i_mutex_key#8 &base->lock irq_context: 0 &sb->s_type->i_mutex_key#8 &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: hardirq &fq->mq_flush_lock &x->wait#26 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#8 key irq_context: 0 &sb->s_type->i_mutex_key#8 pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#8 percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#8 (&timer.timer) irq_context: 0 &sb->s_type->i_mutex_key#8 &ei->i_es_lock irq_context: 0 &sb->s_type->i_mutex_key#8 &((cluster_info + ci)->lock)/1 irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex swap_lock irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex swap_lock &p->lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex percpu_ref_switch_lock irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex swap_lock &p->lock#2 swap_avail_lock irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex (console_sem).lock irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex console_lock console_srcu console_owner_lock irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex console_lock console_srcu console_owner irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 &sb->s_type->i_mutex_key#8 swapon_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &sb->s_type->i_mutex_key#8 proc_poll_wait.lock irq_context: 0 swap_slots_cache_enable_mutex irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex cpuhp_state-down irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex cpuhp_state-up irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex &x->wait#6 irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex &rq->__lock irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cpu_hotplug_lock cpuhp_state-up swap_slots_cache_mutex irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock &rq->__lock irq_context: 0 swap_slots_cache_enable_mutex cpu_hotplug_lock cpuhp_state_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 swap_slots_cache_enable_mutex swap_lock irq_context: 0 &____s->seqcount#4 irq_context: 0 &pipe->rd_wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &sighand->siglock &p->pi_lock &rq->__lock irq_context: softirq &(&wb->bw_dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 &sb->s_type->i_lock_key &dentry->d_lock irq_context: 0 sb_writers#5 tomoyo_ss 
tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sb->s_type->i_lock_key#22 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &ei->i_es_lock key#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss tomoyo_policy_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock rcu_node_0 irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss tomoyo_policy_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &meta->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock sb_writers#5 mount_lock irq_context: 0 &mm->mmap_lock sb_writers#5 tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock sb_writers#5 &sb->s_type->i_lock_key irq_context: 0 &mm->mmap_lock sb_writers#5 &wb->list_lock irq_context: 0 &mm->mmap_lock sb_writers#5 &wb->list_lock &sb->s_type->i_lock_key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &bgl->locks[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rq->__lock irq_context: 0 &newf->file_lock &newf->resize_wait irq_context: 0 &kcov->lock irq_context: 0 &mm->mmap_lock &kcov->lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3 tomoyo_ss &n->list_lock &c->lock irq_context: 0 &kcov->lock kcov_remote_lock irq_context: 0 &kcov->lock kcov_remote_lock pool_lock#2 irq_context: 0 pid_caches_mutex irq_context: 0 pid_caches_mutex slab_mutex irq_context: 0 pid_caches_mutex slab_mutex fs_reclaim irq_context: 0 pid_caches_mutex slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pid_caches_mutex slab_mutex pool_lock#2 irq_context: 0 pid_caches_mutex slab_mutex pcpu_alloc_mutex irq_context: 0 pid_caches_mutex slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 pid_caches_mutex slab_mutex &root->kernfs_rwsem irq_context: 0 pid_caches_mutex slab_mutex &k->list_lock irq_context: 0 pid_caches_mutex slab_mutex lock irq_context: 0 pid_caches_mutex slab_mutex lock kernfs_idr_lock irq_context: 0 pid_caches_mutex slab_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 pid_caches_mutex slab_mutex &c->lock irq_context: 0 pid_caches_mutex slab_mutex &n->list_lock irq_context: 0 pid_caches_mutex slab_mutex &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#45 irq_context: 0 &type->s_umount_key#45 sb_lock irq_context: 0 &type->s_umount_key#45 &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#17 irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem rcu_read_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem fs_reclaim irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem rename_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem rename_lock rename_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem rename_lock rename_lock.seqcount &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem mount_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem mount_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem mount_lock rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem mount_lock &obj_hash[i].lock irq_context: 0 bt_proto_lock &sk->sk_peer_lock irq_context: 0 bt_proto_lock hci_sk_list.lock irq_context: 0 misc_mtx &base->lock irq_context: 0 misc_mtx &base->lock &obj_hash[i].lock irq_context: 0 (work_completion)(&(&data->open_timeout)->work) irq_context: 0 &data->open_mutex irq_context: 0 &data->open_mutex fs_reclaim irq_context: 0 &data->open_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex pool_lock#2 irq_context: 0 &data->open_mutex &pcp->lock &zone->lock irq_context: 0 &data->open_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &data->open_mutex &____s->seqcount irq_context: 0 &data->open_mutex &obj_hash[i].lock irq_context: 0 &data->open_mutex &x->wait#9 irq_context: 0 &data->open_mutex hci_index_ida.xa_lock irq_context: 0 &data->open_mutex &c->lock irq_context: 0 &data->open_mutex &n->list_lock irq_context: 0 &data->open_mutex &n->list_lock &c->lock irq_context: 0 &data->open_mutex pcpu_alloc_mutex irq_context: 0 &data->open_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 &data->open_mutex cpu_hotplug_lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex cpu_hotplug_lock 
wq_pool_mutex pool_lock#2 irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 &data->open_mutex wq_pool_mutex irq_context: 0 &data->open_mutex wq_pool_mutex &wq->mutex irq_context: 0 &data->open_mutex pin_fs_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 &data->open_mutex &k->list_lock irq_context: 0 &data->open_mutex gdp_mutex irq_context: 0 &data->open_mutex gdp_mutex &k->list_lock irq_context: 0 &data->open_mutex gdp_mutex fs_reclaim irq_context: 0 &data->open_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex gdp_mutex pool_lock#2 irq_context: 0 &data->open_mutex gdp_mutex lock irq_context: 0 &data->open_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 &data->open_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 &data->open_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &data->open_mutex lock irq_context: 0 &data->open_mutex lock kernfs_idr_lock irq_context: 0 &data->open_mutex &root->kernfs_rwsem irq_context: 0 &data->open_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &data->open_mutex bus_type_sem irq_context: 0 &data->open_mutex sysfs_symlink_target_lock irq_context: 0 &data->open_mutex &root->kernfs_rwsem irq_context: 0 &data->open_mutex &dev->power.lock irq_context: 0 &data->open_mutex dpm_list_mtx irq_context: 0 &data->open_mutex uevent_sock_mutex irq_context: 0 &data->open_mutex uevent_sock_mutex fs_reclaim irq_context: 0 &data->open_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 &data->open_mutex uevent_sock_mutex nl_table_lock irq_context: 0 &data->open_mutex uevent_sock_mutex &c->lock irq_context: 0 &data->open_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 
&data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 &data->open_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 &data->open_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 &data->open_mutex subsys mutex#74 irq_context: 0 &data->open_mutex subsys mutex#74 &k->k_lock irq_context: 0 &data->open_mutex &dev->devres_lock irq_context: 0 &data->open_mutex triggers_list_lock irq_context: 0 &data->open_mutex leds_list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex irq_context: 0 &data->open_mutex rfkill_global_mutex fs_reclaim irq_context: 0 &data->open_mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex rfkill_global_mutex pool_lock#2 irq_context: 0 &data->open_mutex rfkill_global_mutex &k->list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex lock irq_context: 0 &data->open_mutex rfkill_global_mutex lock kernfs_idr_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &data->open_mutex rfkill_global_mutex bus_type_sem irq_context: 0 &data->open_mutex rfkill_global_mutex sysfs_symlink_target_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &c->lock irq_context: 0 &data->open_mutex rfkill_global_mutex lock kernfs_idr_lock &c->lock irq_context: 0 &data->open_mutex rfkill_global_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 &data->open_mutex rfkill_global_mutex &dev->power.lock irq_context: 0 &data->open_mutex rfkill_global_mutex dpm_list_mtx irq_context: 0 &data->open_mutex rfkill_global_mutex &rfkill->lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &c->lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex nl_table_lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 &data->open_mutex rfkill_global_mutex &obj_hash[i].lock irq_context: 0 &data->open_mutex rfkill_global_mutex &k->k_lock irq_context: 0 &data->open_mutex rfkill_global_mutex subsys mutex#40 irq_context: 0 &data->open_mutex rfkill_global_mutex subsys mutex#40 &k->k_lock irq_context: 0 &data->open_mutex rfkill_global_mutex triggers_list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex leds_list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex &rfkill->lock irq_context: 0 &data->open_mutex hci_dev_list_lock irq_context: 0 &data->open_mutex tk_core.seq.seqcount irq_context: 0 &data->open_mutex hci_sk_list.lock irq_context: 0 &data->open_mutex (pm_chain_head).rwsem irq_context: 0 &data->open_mutex rcu_read_lock &pool->lock irq_context: 0 &data->open_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &data->open_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &data->open_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &rq->__lock irq_context: 0 &data->open_mutex &list->lock#5 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->managed_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) &q->blkcg_mutex &obj_hash[i].lock pool_lock irq_context: 0 &data->open_mutex &data->read_wait irq_context: 0 &data->open_mutex rfkill_global_mutex triggers_list_lock &rq->__lock irq_context: 0 &list->lock#5 irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock percpu_counters_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock &cfs_rq->removed.lock irq_context: 0 
sb_writers#7 kn->active#4 &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (console_sem).lock irq_context: 0 misc_mtx &wq->mutex irq_context: softirq rcu_callback rcu_read_lock &q->mq_freeze_wq &p->pi_lock irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem &rq->__lock irq_context: softirq rcu_callback rcu_read_lock &q->mq_freeze_wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->rd_wait &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 &pipe->rd_wait &p->pi_lock &rq->__lock &base->lock irq_context: 0 &pipe->rd_wait &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI slock-AF_BLUETOOTH-BTPROTO_HCI irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI sock_cookie_ida.xa_lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &p->alloc_lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI pool_lock#2 irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI tk_core.seq.seqcount irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI hci_sk_list.lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &obj_hash[i].lock irq_context: 0 slock-AF_BLUETOOTH-BTPROTO_HCI irq_context: 0 hci_dev_list_lock irq_context: 0 &list->lock#7 irq_context: 0 &data->read_wait irq_context: 0 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override pool_lock irq_context: 0 &data->open_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex 
rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#12 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &n->list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#12 mount_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&barr->work) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) quarantine_lock irq_context: 0 fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&barr->work) &x->wait#10 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex rfkill_global_mutex &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex.wait_lock irq_context: 0 &data->open_mutex &p->pi_lock irq_context: 0 &data->open_mutex &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &rq->__lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock 
rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ipvlan->addrs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ipvlan->addrs_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ipvlan->addrs_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ipvlan->addrs_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &xa->xa_lock#18 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &app->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&app->join_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&app->periodic_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &list->lock#10 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) lock pidmap_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem key irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem hwsim_netgroup_ida.xa_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rdma_nets_rwsem irq_context: hardirq log_wait.lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rdma_nets_rwsem rdma_nets.xa_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem devices_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-clock-AF_NETLINK irq_context: 0 &journal->j_state_lock &journal->j_wait_transaction_locked &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &nlk->wait irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rdma_nets.xa_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&app->join_timer)#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &app->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &list->lock#11 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&priv->scan_result)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&priv->scan_result)->work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&priv->scan_result)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&priv->connect)->work) irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&hsr->prune_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&hsr->announce_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (console_sem).lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex console_lock console_srcu console_owner irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 &data->open_mutex 
uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 syslog_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &meta->lock irq_context: 0 &data->open_mutex rfkill_global_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 &data->open_mutex rfkill_global_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 &data->open_mutex rfkill_global_mutex &pcp->lock &zone->lock irq_context: 0 &data->open_mutex rfkill_global_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &data->open_mutex rfkill_global_mutex &____s->seqcount irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &x->wait#3 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &sighand->siglock &base->lock irq_context: 0 &sighand->siglock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &ndev->lock batched_entropy_u8.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex &ndev->lock kfence_freelist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 fill_pool_map-wait-type-override &rq->__lock cpu_asid_lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rq->__lock cpu_asid_lock irq_context: softirq rcu_callback rcu_read_lock &q->mq_freeze_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->w) &base->lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &wq->mutex &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_QIPCRTR &rq->__lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &pcp->lock &zone->lock 
&____s->seqcount irq_context: 0 sb_writers#6 &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work pool_lock irq_context: softirq rcu_callback put_task_map-wait-type-override stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &n->list_lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &n->list_lock &c->lock irq_context: 0 &pool->lock &x->wait#10 irq_context: 0 &pool->lock &x->wait#10 &p->pi_lock irq_context: 0 &pool->lock &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 &pool->lock &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock irq_context: 0 &hdev->req_lock &c->lock irq_context: 0 &hdev->req_lock &____s->seqcount irq_context: 0 &hdev->req_lock pool_lock#2 irq_context: 0 &hdev->req_lock &list->lock#6 irq_context: 0 &hdev->req_lock &list->lock#7 irq_context: 0 &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rq->__lock &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->req_wait_q irq_context: 0 &hdev->req_lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock &base->lock irq_context: 0 &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &hsr->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bat_priv->tt.commit_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex ptype_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &entry->crc_lock irq_context: 0 &hdev->req_lock (&timer.timer) irq_context: 0 cgroup_threadgroup_rwsem &sighand->siglock irq_context: 0 cgroup_threadgroup_rwsem 
&sighand->siglock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &____s->seqcount irq_context: 0 &f->f_pos_lock &p->lock &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim stock_lock irq_context: 0 &f->f_pos_lock &p->lock &____s->seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key#2 stock_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock rcu_node_0 irq_context: 0 &mdev->req_queue_mutex &dev->mutex#3 &q->mmap_lock &rq->__lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 key irq_context: 0 ebt_mutex &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override key irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override pcpu_lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu rcu_node_0 irq_context: 0 &vma->vm_lock->lock remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 loop_validate_mutex &lo->lo_mutex &lock->wait_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex stock_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex key irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex percpu_counters_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex pcpu_lock stock_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim pcpu_lock stock_lock irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem &rq->__lock irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock init_task.mems_allowed_seq.seqcount irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 
fill_pool_map-wait-type-override percpu_counters_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock key irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock pcpu_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem pool_lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override pcpu_lock stock_lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock percpu_counters_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &mm->mmap_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PACKET &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PACKET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock &sighand->siglock &sighand->signalfd_wqh &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock kfence_freelist_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 &u->iolock rcu_node_0 irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex console_owner_lock irq_context: 0 cb_lock genl_mutex console_owner irq_context: 0 sb_writers &cfs_rq->removed.lock irq_context: 0 sb_writers pool_lock#2 irq_context: 0 sb_writers#7 
&of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &sighand->siglock &pcp->lock &zone->lock irq_context: 0 misc_mtx pcpu_alloc_mutex.wait_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock &base->lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI slock-AF_BLUETOOTH-BTPROTO_HCI irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI hci_sk_list.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI sock_cookie_ida.xa_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI hci_sk_list.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI clock-AF_BLUETOOTH irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_BLUETOOTH-BTPROTO_HCI irq_context: 0 &sb->s_type->i_mutex_key#10 hci_dev_list_lock irq_context: 0 rtnl_mutex (inetaddr_validator_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 
&mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1021 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1021 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &(&fn_net->fib_chain)->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock stock_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rnp->exp_wq[3] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &meta->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &q->sysfs_dir_lock &n->list_lock irq_context: 0 &q->sysfs_dir_lock &n->list_lock &c->lock irq_context: 0 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 namespace_sem mnt_id_ida.xa_lock pool_lock#2 irq_context: 0 rcu_read_lock &undo_list->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex key#19 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bat_priv->forw_bcast_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bat_priv->forw_bat_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&forw_packet_aggr->delayed_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bat_priv->tt.changes_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &c->lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock deferred_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock (console_sem).lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock console_lock console_srcu console_owner irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock deferred_lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) &br->multicast_lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) (&p->rexmit_timer) irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) &base->lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) (&p->timer) irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) pool_lock#2 irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) krc.lock irq_context: softirq (&mp->timer) irq_context: softirq (&mp->timer) &br->multicast_lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rhashtable_bucket irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock &pool->lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override 
&pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_QIPCRTR irq_context: 0 nfc_devlist_mutex subsys mutex#39 &rq->__lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) (&mp->timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (switchdev_blocking_notif_chain).rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&pmctx->ip6_mc_router_timer) irq_context: 0 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &dentry->d_lock sysctl_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock sysctl_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &n->list_lock &c->lock irq_context: 0 uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &ei->socket.wq.wait irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 rtnl_mutex &nr_netdev_addr_lock_key irq_context: 0 rtnl_mutex listen_lock irq_context: 0 rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 pernet_ops_rwsem irq_context: 0 pernet_ops_rwsem stack_depot_init_mutex irq_context: 0 pernet_ops_rwsem crngs.lock irq_context: 0 pernet_ops_rwsem proc_inum_ida.xa_lock irq_context: 0 pernet_ops_rwsem fs_reclaim irq_context: 0 pernet_ops_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem pool_lock#2 irq_context: 0 pernet_ops_rwsem proc_subdir_lock irq_context: 0 pernet_ops_rwsem &c->lock irq_context: 0 pernet_ops_rwsem proc_subdir_lock irq_context: 0 pernet_ops_rwsem &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem &____s->seqcount irq_context: 0 pernet_ops_rwsem sysctl_lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem &sb->s_type->i_lock_key#8 irq_context: 0 pernet_ops_rwsem &dir->lock irq_context: 0 pernet_ops_rwsem &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK k-slock-AF_NETLINK irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rhashtable_bucket irq_context: 0 pernet_ops_rwsem k-slock-AF_NETLINK irq_context: 0 pernet_ops_rwsem nl_table_lock irq_context: 0 pernet_ops_rwsem nl_table_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nl_table_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem nl_table_wait.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex irq_context: 0 pernet_ops_rwsem nl_table_lock irq_context: 0 pernet_ops_rwsem &net->rules_mod_lock irq_context: 0 pernet_ops_rwsem percpu_counters_lock irq_context: 0 pernet_ops_rwsem batched_entropy_u32.lock irq_context: 0 
pernet_ops_rwsem k-slock-AF_INET/1 irq_context: 0 pernet_ops_rwsem &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem cache_list_lock irq_context: 0 pernet_ops_rwsem tk_core.seq.seqcount irq_context: 0 pernet_ops_rwsem &k->list_lock irq_context: 0 pernet_ops_rwsem lock irq_context: 0 pernet_ops_rwsem lock kernfs_idr_lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 pernet_ops_rwsem uevent_sock_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem uevent_sock_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem uevent_sock_mutex nl_table_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem &sn->pipefs_sb_lock irq_context: 0 pernet_ops_rwsem &n->list_lock irq_context: 0 pernet_ops_rwsem &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem &s->s_inode_list_lock irq_context: 0 pernet_ops_rwsem nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_ecache_mutex irq_context: 0 pernet_ops_rwsem nf_log_mutex irq_context: 0 pernet_ops_rwsem &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &base->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock key irq_context: 0 &vma->vm_lock->lock pcpu_lock irq_context: 0 &vma->vm_lock->lock percpu_counters_lock irq_context: 0 &vma->vm_lock->lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &xt[i].mutex remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 key#24 irq_context: 0 pernet_ops_rwsem ipvs->est_mutex irq_context: 0 pernet_ops_rwsem ipvs->est_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem ipvs->est_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &c->lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem 
ipvs->est_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem &base->lock irq_context: 0 pernet_ops_rwsem &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem &hashinfo->lock#2 irq_context: 0 pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock irq_context: 0 pernet_ops_rwsem proc_inum_ida.xa_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &net->ipv4.ra_mutex irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock key irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock pcpu_lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem &this->receive_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &tn->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &x->wait#9 irq_context: 0 pernet_ops_rwsem rtnl_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &k->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex bus_type_sem irq_context: 0 pernet_ops_rwsem rtnl_mutex sysfs_symlink_target_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock 
kernfs_idr_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem irq_context: 0 pernet_ops_rwsem rtnl_mutex &dev->power.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dpm_list_mtx irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex subsys mutex#20 irq_context: 0 pernet_ops_rwsem rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &dir->lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_base_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex input_pool.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex batched_entropy_u32.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &tbl->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex sysctl_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex nl_table_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex nl_table_wait.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex proc_subdir_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex proc_subdir_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &pnettable->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex smc_ib_devices.mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 pernet_ops_rwsem rdma_nets.xa_lock irq_context: 0 pernet_ops_rwsem devices_rwsem irq_context: 0 pernet_ops_rwsem hwsim_netgroup_ida.xa_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex stack_depot_init_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex uevent_sock_mutex.wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 pernet_ops_rwsem 
rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &n->list_lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock base_crng.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex pool_lock#2 irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1033 irq_context: 0 rcu_state.barrier_mutex rcu_read_lock &rq->__lock irq_context: 0 rcu_state.barrier_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1033 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1033 irq_context: 0 cb_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock krc.lock &base->lock &obj_hash[i].lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex batched_entropy_u8.lock crngs.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&pool->idle_timer) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&pool->idle_timer) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock &____s->seqcount irq_context: 0 tasklist_lock rcu_read_lock 
&sighand->siglock irq_context: 0 tasklist_lock &sighand->siglock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &x->wait#17 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &x->wait#17 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock percpu_counters_lock irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock pcpu_lock stock_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &n->list_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 &rq->__lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &base->lock irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 tasklist_lock &sighand->siglock &____s->seqcount#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#559 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &meta->lock irq_context: 0 fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER &n->list_lock irq_context: 0 sb_writers#5 &sb->s_type->i_lock_key &xa->xa_lock#9 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock 
ptlock_ptr(ptdesc)#2 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 __ip_vs_mutex rcu_node_0 irq_context: 0 __ip_vs_mutex &rcu_state.expedited_wq irq_context: 0 __ip_vs_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &pcp->lock &zone->lock irq_context: 0 &q->sysfs_dir_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &q->sysfs_dir_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock kfence_freelist_lock irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &hdev->lock &n->list_lock irq_context: 0 &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 batched_entropy_u8.lock irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &n->list_lock &c->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &rq->__lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1022 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex pcpu_alloc_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex pool_lock#2 irq_context: 0 cb_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &meta->lock irq_context: 0 cb_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock kfence_freelist_lock irq_context: 0 &f->f_pos_lock sb_writers#4 fs_reclaim &rq->__lock irq_context: 0 &f->f_pos_lock &lock->wait_lock irq_context: 0 &f->f_pos_lock sb_writers#4 &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock 
kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &meta->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events deferred_process_work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem key irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&barr->work) &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&barr->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#559 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss mount_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx console_lock console_srcu console_owner_lock irq_context: 0 misc_mtx console_lock console_srcu console_owner irq_context: 0 rtnl_mutex dev_addr_sem &tn->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &rq->__lock irq_context: 0 &type->i_mutex_dir_key#2 rcu_node_0 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex sk_lock-AF_INET fs_reclaim &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss mount_lock rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex.wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &sem->wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#272 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock pool_lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &____s->seqcount#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &n->list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &n->list_lock &c->lock irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &tbl->lock &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex batched_entropy_u32.lock crngs.lock irq_context: 0 rtnl_mutex &tbl->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex failover_lock irq_context: 0 rtnl_mutex &tbl->lock quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1134 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1134 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#465 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock irq_context: 0 &pool->lock &x->wait#10 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex.wait_lock irq_context: 0 pernet_ops_rwsem &p->pi_lock irq_context: 0 pernet_ops_rwsem &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1163 irq_context: 0 &hdev->req_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &rq->__lock cpu_asid_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem pcpu_lock irq_context: 0 &xt[i].mutex fs_reclaim 
&rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock rcu_read_lock stock_lock irq_context: 0 rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem proc_inum_ida.xa_lock &c->lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &c->lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events fqdir_free_work &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_lock_key#27 irq_context: 0 &f->f_pos_lock &sb->s_type->i_mutex_key#17 irq_context: 0 &f->f_pos_lock 
&sb->s_type->i_mutex_key#17 &mm->mmap_lock irq_context: 0 &f->f_pos_lock &sb->s_type->i_mutex_key#17 vmap_area_lock irq_context: 0 &f->f_pos_lock &sb->s_type->i_mutex_key#17 &dentry->d_lock irq_context: 0 &f->f_pos_lock &sb->s_type->i_mutex_key#17 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &sb->s_type->i_mutex_key#17 sb_writers#13 mount_lock irq_context: 0 &f->f_pos_lock &sb->s_type->i_mutex_key#17 sb_writers#13 tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &sb->s_type->i_mutex_key#17 sb_writers#13 &sb->s_type->i_lock_key#27 irq_context: 0 &f->f_pos_lock &sb->s_type->i_mutex_key#17 sb_writers#13 &wb->list_lock irq_context: 0 &f->f_pos_lock &sb->s_type->i_mutex_key#17 sb_writers#13 &wb->list_lock &sb->s_type->i_lock_key#27 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &base->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1034 irq_context: 0 ebt_mutex &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &hdev->lock &____s->seqcount#2 irq_context: 0 (wq_completion)events pcpu_balance_work &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock quarantine_lock irq_context: 0 misc_mtx (wq_completion)nfc4_nci_rx_wq#126 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex device_links_lock &rq->__lock irq_context: 0 sb_writers#4 rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 wq_pool_attach_mutex wq_pool_attach_mutex.wait_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 sb_writers#4 &rq->__lock cpu_asid_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu quarantine_lock irq_context: 0 cgroup_threadgroup_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 &mm->mmap_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key &n->list_lock &c->lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu &rq->__lock irq_context: 0 &dev->filelist_mutex &rq->__lock irq_context: 0 &dev->filelist_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->w) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1135 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1135 irq_context: 0 pernet_ops_rwsem rtnl_mutex key irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex percpu_counters_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem 
k-sk_lock-AF_INET6 &tcp_hashinfo.bhash[i].lock k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &h->lhash2[i].lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem wq_pool_mutex irq_context: 0 pernet_ops_rwsem wq_pool_mutex &wq->mutex irq_context: 0 pernet_ops_rwsem pcpu_lock irq_context: 0 pernet_ops_rwsem &list->lock#4 irq_context: 0 pernet_ops_rwsem &dir->lock#2 irq_context: 0 pernet_ops_rwsem ptype_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock rhashtable_bucket irq_context: 0 pernet_ops_rwsem k-clock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC k-slock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-slock-AF_TIPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC k-slock-AF_RXRPC irq_context: 0 pernet_ops_rwsem k-slock-AF_RXRPC irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fs_reclaim irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex crngs.lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &sb->s_type->i_lock_key#8 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &dir->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 &table->hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 &table->hash[i].lock k-clock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-sk_lock-AF_INET6 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex k-slock-AF_INET6 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex cpu_hotplug_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex kthread_create_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &x->wait irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rdma_nets.xa_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &c->lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 
irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &x->wait#21 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &local->services_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC fs_reclaim irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC pool_lock#2 irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &c->lock irq_context: 0 pernet_ops_rwsem &rxnet->conn_lock irq_context: 0 pernet_ops_rwsem &call->waitq irq_context: 0 pernet_ops_rwsem &rx->call_lock irq_context: 0 pernet_ops_rwsem &rxnet->call_lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem net_rwsem irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock sysctl_lock irq_context: 0 sb_writers#4 &____s->seqcount#10 irq_context: 0 sb_writers#4 &(&net->ipv4.ping_group_range.lock)->lock irq_context: 0 sb_writers#4 &(&net->ipv4.ping_group_range.lock)->lock &____s->seqcount#10 irq_context: 0 misc_mtx &dir->lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex &r->consumer_lock irq_context: 0 rtnl_mutex &r->consumer_lock &r->producer_lock irq_context: 0 rtnl_mutex failover_lock irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER pool_lock#2 irq_context: 0 rtnl_mutex &mm->mmap_lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock pool_lock#2 
irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock batched_entropy_u32.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &n->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex.wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER &c->lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &c->lock irq_context: 0 pernet_ops_rwsem batched_entropy_u32.lock crngs.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &____s->seqcount irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock &c->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &c->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem 
k-sk_lock-AF_RXRPC &rxnet->local_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex &n->lock irq_context: 0 rtnl_mutex &n->lock &(&n->ha_lock)->lock irq_context: 0 rtnl_mutex &n->lock &(&n->ha_lock)->lock &____s->seqcount#8 irq_context: 0 rtnl_mutex &tbl->lock &n->lock irq_context: 0 rtnl_mutex rcu_read_lock lock#8 irq_context: 0 rtnl_mutex rcu_read_lock id_table_lock irq_context: 0 rtnl_mutex &n->lock irq_context: 0 rtnl_mutex &n->lock &____s->seqcount#8 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex dev_addr_sem &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &n->lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &n->lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock nl_table_lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock nl_table_wait.lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock rcu_read_lock lock#8 irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock rcu_read_lock id_table_lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &dir->lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock krc.lock irq_context: 0 rtnl_mutex _xmit_ETHER pool_lock#2 irq_context: 0 rtnl_mutex &ndev->lock pool_lock#2 irq_context: 0 rtnl_mutex &ndev->lock &____s->seqcount irq_context: 0 rtnl_mutex &ndev->lock &dir->lock#2 irq_context: 0 rtnl_mutex &ndev->lock pcpu_lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock pool_lock#2 irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock nl_table_lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock nl_table_wait.lock irq_context: 0 rtnl_mutex rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex pcpu_lock irq_context: 0 &x->wait#21 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#21 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex batched_entropy_u32.lock crngs.lock irq_context: 0 
rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key pool_lock#2 irq_context: 0 rtnl_mutex (switchdev_blocking_notif_chain).rwsem irq_context: 0 rtnl_mutex &br->hash_lock irq_context: 0 rtnl_mutex &br->hash_lock &____s->seqcount irq_context: 0 rtnl_mutex &br->hash_lock &c->lock irq_context: 0 rtnl_mutex &br->hash_lock pool_lock#2 irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rhashtable_bucket irq_context: 0 rtnl_mutex &br->hash_lock nl_table_lock irq_context: 0 rtnl_mutex &br->hash_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->hash_lock nl_table_wait.lock irq_context: 0 rtnl_mutex rcu_read_lock rhashtable_bucket irq_context: 0 rtnl_mutex nf_hook_mutex irq_context: 0 rtnl_mutex nf_hook_mutex fs_reclaim irq_context: 0 rtnl_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex nf_hook_mutex pool_lock#2 irq_context: 0 rtnl_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 rtnl_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 rtnl_mutex j1939_netdev_lock irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &r->producer_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &lock->wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &lock->wait_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events (linkwatch_work).work &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work &p->pi_lock &rq->__lock irq_context: 0 
(wq_completion)events (linkwatch_work).work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2 irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2 pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex quarantine_lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu irq_context: 0 pernet_ops_rwsem remove_cache_srcu quarantine_lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &c->lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &n->list_lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &____s->seqcount irq_context: 0 rtnl_mutex dev_addr_sem &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem &n->list_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &____s->seqcount irq_context: 0 rtnl_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &bat_priv->tvlv.handler_list_lock irq_context: 0 rtnl_mutex &bat_priv->tvlv.handler_list_lock pool_lock#2 irq_context: 0 rtnl_mutex &bat_priv->tvlv.container_list_lock irq_context: 0 rtnl_mutex &bat_priv->tvlv.handler_list_lock &____s->seqcount irq_context: 0 rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key irq_context: 0 rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key &c->lock irq_context: 
0 rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key pool_lock#2 irq_context: 0 rtnl_mutex &bat_priv->softif_vlan_list_lock irq_context: 0 rtnl_mutex &bat_priv->softif_vlan_list_lock pool_lock#2 irq_context: 0 rtnl_mutex key#16 irq_context: 0 rtnl_mutex &bat_priv->tt.changes_list_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: softirq &(&bat_priv->nc.work)->timer irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) key#17 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) key#18 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 rtnl_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 rtnl_mutex uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: softirq &(&idev->mc_ifc_work)->timer rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 rtnl_mutex kernfs_idr_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock 
&p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex noop_qdisc.q.lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex tk_core.seq.seqcount irq_context: 0 rtnl_mutex &wq->mutex irq_context: 0 rtnl_mutex &wq->mutex &pool->lock irq_context: 0 rtnl_mutex wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 rtnl_mutex init_lock irq_context: 0 rtnl_mutex init_lock slab_mutex irq_context: 0 rtnl_mutex init_lock slab_mutex fs_reclaim irq_context: 0 rtnl_mutex init_lock slab_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex init_lock slab_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex init_lock slab_mutex pool_lock#2 irq_context: 0 rtnl_mutex init_lock slab_mutex pcpu_alloc_mutex irq_context: 0 rtnl_mutex init_lock slab_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 rtnl_mutex init_lock slab_mutex &root->kernfs_rwsem irq_context: 0 rtnl_mutex init_lock slab_mutex &k->list_lock irq_context: 0 rtnl_mutex init_lock slab_mutex lock irq_context: 0 rtnl_mutex init_lock slab_mutex lock kernfs_idr_lock irq_context: 0 rtnl_mutex init_lock slab_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex init_lock slab_mutex &c->lock irq_context: 0 rtnl_mutex init_lock slab_mutex &____s->seqcount irq_context: 0 rtnl_mutex init_lock fs_reclaim irq_context: 0 rtnl_mutex init_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex init_lock &zone->lock irq_context: 0 rtnl_mutex init_lock &____s->seqcount irq_context: 0 rtnl_mutex init_lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex init_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex init_lock &base->lock irq_context: 0 rtnl_mutex init_lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex init_lock crngs.lock irq_context: 0 rtnl_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex hrtimer_bases.lock irq_context: 0 rtnl_mutex hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 rtnl_mutex hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 rtnl_mutex batched_entropy_u8.lock irq_context: 0 rtnl_mutex kfence_freelist_lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) 
&obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex dev_addr_sem &rq->__lock irq_context: 0 rtnl_mutex _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex deferred_lock irq_context: 0 (wq_completion)events deferred_process_work irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock _xmit_ETHER irq_context: 0 rtnl_mutex &br->lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rhashtable_bucket irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &c->lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &____s->seqcount irq_context: 0 rtnl_mutex &br->lock &br->hash_lock pool_lock#2 irq_context: 0 rtnl_mutex &br->lock &br->hash_lock nl_table_lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock nl_table_wait.lock irq_context: 0 rtnl_mutex &br->lock lweventlist_lock irq_context: 0 rtnl_mutex &br->lock lweventlist_lock pool_lock#2 
irq_context: 0 rtnl_mutex &br->lock lweventlist_lock &dir->lock#2 irq_context: 0 rtnl_mutex &pn->hash_lock irq_context: 0 rtnl_mutex &net->ipv6.fib6_gc_lock rcu_read_lock &tb->tb6_lock irq_context: 0 rtnl_mutex &net->ipv6.fib6_gc_lock rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 rtnl_mutex &net->ipv6.fib6_gc_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &bat_priv->tvlv.handler_list_lock &c->lock irq_context: 0 rtnl_mutex &bat_priv->tvlv.handler_list_lock &n->list_lock irq_context: 0 rtnl_mutex &bat_priv->tvlv.handler_list_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &____s->seqcount irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex deferred_lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex (switchdev_blocking_notif_chain).rwsem irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &dir->lock#2 irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events deferred_process_work &p->pi_lock irq_context: 0 (wq_completion)events deferred_process_work &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events deferred_process_work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex lweventlist_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 rtnl_mutex 
rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock &____s->seqcount irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock quarantine_lock irq_context: 0 rtnl_mutex rcu_read_lock &bond->stats_lock/1 irq_context: softirq &(&slave->notify_work)->timer irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rq->__lock irq_context: 0 bt_proto_lock &c->lock irq_context: 0 bt_proto_lock &____s->seqcount irq_context: 0 &data->open_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &data->open_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 &data->open_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 &data->open_mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 batched_entropy_u8.lock crngs.lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock key irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex &rq->__lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &rdev->wiphy.mtx &rq->__lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 misc_mtx pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET &obj_hash[i].lock pool_lock irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu pool_lock#2 irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu quarantine_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock cpu_asid_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &vma->vm_lock->lock batched_entropy_u8.lock crngs.lock irq_context: 0 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &kernfs_locks->open_file_mutex[count] 
fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &pcp->lock &zone->lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock batched_entropy_u8.lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock kfence_freelist_lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &meta->lock irq_context: 0 tomoyo_ss remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1135 irq_context: 0 rtnl_mutex console_owner_lock irq_context: 0 rtnl_mutex console_owner irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1136 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1136 irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: softirq &c->lock batched_entropy_u8.lock crngs.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1163 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1163 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1163 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1163 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1163 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &ent->pde_unload_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 pcpu_lock stock_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 &meta->lock irq_context: 0 &sb->s_type->i_mutex_key#10 kfence_freelist_lock irq_context: 0 &type->i_mutex_dir_key#4 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->master_mutex &n->list_lock irq_context: 0 &dev->master_mutex &n->list_lock &c->lock irq_context: 0 rtnl_mutex uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 rtnl_mutex uevent_sock_mutex kfence_freelist_lock irq_context: 0 rtnl_mutex uevent_sock_mutex &meta->lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex sk_lock-AF_INET rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex sk_lock-AF_INET rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: 0 cb_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 cb_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) stock_lock irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim &rq->__lock irq_context: 0 &p->lock &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&aux->work) pcpu_lock stock_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) purge_vmap_area_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&aux->work) quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1163 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock fs_reclaim irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock &____s->seqcount irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock stock_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock &c->lock irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock pool_lock#2 irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock &mm->page_table_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock ptlock_ptr(ptdesc)#2 irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1164 irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &dev->mode_config.idr_mutex &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#141 irq_context: 0 
&dev->mode_config.idr_mutex &file->master_lookup_lock irq_context: 0 &dev->mode_config.mutex irq_context: 0 &f->f_pos_lock sb_writers#4 irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock &n->list_lock &c->lock irq_context: 0 sb_writers#4 oom_adj_mutex &rq->__lock cpu_asid_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] &cfs_rq->removed.lock irq_context: 0 &mdev->graph_mutex &mm->mmap_lock &rq->__lock irq_context: 0 &mdev->graph_mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex &obj_hash[i].lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_node_0 irq_context: 0 pcpu_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET &____s->seqcount irq_context: 0 &f->f_pos_lock sb_writers#4 sysctl_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex pool_lock#2 irq_context: 0 &f->f_pos_lock sb_writers#4 fs_reclaim irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start stock_lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start key irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 &mdev->graph_mutex &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &____s->seqcount irq_context: 0 &sig->cred_guard_mutex 
sb_writers#3 jbd2_handle &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &obj_hash[i].lock irq_context: 0 &dev->mutex dpm_list_mtx &rq->__lock irq_context: 0 &dev->mutex dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ebt_mutex &cfs_rq->removed.lock irq_context: 0 rtnl_mutex lweventlist_lock batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &lock->wait_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&peer->timer_persistent_keepalive) batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim pcpu_lock irq_context: softirq (&peer->timer_persistent_keepalive) kfence_freelist_lock irq_context: 0 &mdev->graph_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex.wait_lock irq_context: 0 misc_mtx cpu_hotplug_lock &p->pi_lock irq_context: 0 misc_mtx cpu_hotplug_lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock sb_writers#4 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &idev->mc_lock &____s->seqcount irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount irq_context: 0 rtnl_mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock sb_writers#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: softirq rcu_read_lock hwsim_radio_lock init_task.mems_allowed_seq.seqcount irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &c->lock irq_context: 0 &f->f_pos_lock sb_writers#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 &c->lock irq_context: 0 rtnl_mutex dev_addr_sem quarantine_lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &obj_hash[i].lock 
irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: softirq &(&idev->mc_ifc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &obj_hash[i].lock irq_context: 0 rtnl_mutex _xmit_ETHER/1 _xmit_ETHER pool_lock#2 irq_context: 0 tasklist_lock rcu_read_lock &p->pi_lock irq_context: 0 tasklist_lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 ebt_mutex &mm->mmap_lock pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET batched_entropy_u8.lock crngs.lock irq_context: 0 rcu_read_lock &sighand->siglock &____s->seqcount#2 irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem quarantine_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx qrtr_node_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fs_reclaim &rq->__lock irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 cb_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->alloc_lock &x->wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1164 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu pcpu_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &r->producer_lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 _xmit_ETHER krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) crngs.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &r->producer_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex rcu_node_0 irq_context: 0 rtnl_mutex lweventlist_lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex crngs.lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex pool_lock#2 irq_context: 0 rtnl_mutex ptype_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex batched_entropy_u8.lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &____s->seqcount irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock &base->lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_node_0 irq_context: 0 (wq_completion)bat_events 
(work_completion)(&(&bat_priv->nc.work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex _xmit_NONE irq_context: 0 rtnl_mutex lock#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &bond->stats_lock irq_context: 0 &root->kernfs_rwsem stock_lock irq_context: 0 &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &n->list_lock &c->lock irq_context: 0 &xt[i].mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&mp->timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&br->mcast_gc_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex &x->wait#24 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 &f->f_pos_lock sb_writers#4 &mm->mmap_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_LOOPBACK irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex stock_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex key irq_context: 0 rtnl_mutex _xmit_ETHER &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex pcpu_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex percpu_counters_lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 &f->f_pos_lock sb_writers#4 &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu irq_context: 0 misc_mtx (wq_completion)nfc4_nci_cmd_wq#129 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#413 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#11 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: softirq &(&bat_priv->mcast.work)->timer irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) irq_context: 0 
(wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock key#16 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &bat_priv->tt.changes_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &bat_priv->tvlv.container_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &meta->lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 rtnl_mutex &hsr->list_lock irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&slave->notify_work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex pin_fs_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 rtnl_mutex _xmit_ETHER &pcp->lock &zone->lock irq_context: 0 rtnl_mutex _xmit_ETHER &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#413 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &____s->seqcount#2 irq_context: 0 sb_writers#3 sb_internal 
jbd2_handle &sbi->s_orphan_lock mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &c->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu percpu_counters_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock quarantine_lock irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&pool->idle_timer) irq_context: softirq (&pool->idle_timer) &pool->lock irq_context: 0 rtnl_mutex devnet_rename_sem quarantine_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &cfs_rq->removed.lock irq_context: softirq (&pool->idle_timer) &pool->lock &obj_hash[i].lock irq_context: softirq (&pool->idle_timer) &pool->lock &base->lock irq_context: softirq (&pool->idle_timer) &pool->lock &base->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &base->lock irq_context: 0 media_devnode_lock irq_context: 0 media_devnode_lock &rq->__lock irq_context: 0 media_devnode_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (debug_obj_work).work quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 &mdev->graph_mutex &mm->mmap_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock key#23 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu 
&____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem &rcu_state.expedited_wq irq_context: 0 rtnl_mutex dev_addr_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex dev_addr_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock percpu_counters_lock irq_context: 0 rtnl_mutex stack_depot_init_mutex &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 (console_sem).lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 console_lock console_srcu console_owner irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex rcu_read_lock mount_lock irq_context: 0 rtnl_mutex rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 rtnl_mutex mount_lock irq_context: 0 rtnl_mutex mount_lock mount_lock.seqcount irq_context: 0 rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex gdp_mutex pool_lock#2 irq_context: 0 rtnl_mutex gdp_mutex lock irq_context: 0 rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 rtnl_mutex &k->k_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex 
rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pcpu_alloc_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock fs_reclaim stock_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim key irq_context: 0 &vma->vm_lock->lock fs_reclaim pcpu_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim percpu_counters_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim pcpu_lock stock_lock irq_context: 0 rtnl_mutex &bridge_netdev_addr_lock_key/1 &____s->seqcount#2 irq_context: 0 rtnl_mutex &bridge_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events &rq->__lock irq_context: 0 rcu_read_lock &f->f_owner.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex.wait_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER &____s->seqcount#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 __ip_vs_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex 
&client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 __ip_vs_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sched_map-wait-type-override rcu_node_0 irq_context: 0 sched_map-wait-type-override &rcu_state.expedited_wq irq_context: 0 rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key irq_context: 0 rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key irq_context: 0 rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key &____s->seqcount irq_context: 0 rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key pool_lock#2 irq_context: 0 sched_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sched_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &ndev->lock &c->lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex lock kernfs_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: softirq (&app->join_timer) irq_context: softirq (&app->join_timer) &app->lock irq_context: softirq (&app->join_timer) &list->lock#10 irq_context: softirq (&app->join_timer) &app->lock batched_entropy_u32.lock irq_context: softirq (&app->join_timer) &app->lock &obj_hash[i].lock irq_context: softirq (&app->join_timer) &app->lock &base->lock irq_context: softirq (&app->join_timer) &app->lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3 irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3 pool_lock#2 irq_context: 0 rtnl_mutex &xa->xa_lock#18 irq_context: 0 rtnl_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &meta->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) kfence_freelist_lock irq_context: softirq &(&bat_priv->orig_work)->timer irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 rcu_read_lock &pool->lock irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 rcu_read_lock &pool->lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) key#19 irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work irq_context: 0 (wq_completion)events_power_efficient (gc_work).work tk_core.seq.seqcount irq_context: 0 (wq_completion)events_power_efficient (gc_work).work "ratelimiter_table_lock" irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &base->lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &base->lock &obj_hash[i].lock irq_context: softirq (&app->join_timer)#2 irq_context: softirq (&app->join_timer)#2 &app->lock#2 irq_context: softirq (&app->join_timer)#2 &list->lock#11 irq_context: softirq (&app->join_timer)#2 batched_entropy_u32.lock irq_context: softirq (&app->join_timer)#2 &obj_hash[i].lock irq_context: softirq (&app->join_timer)#2 &base->lock irq_context: softirq (&app->join_timer)#2 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 &p->lock remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 &____s->seqcount#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &ipvlan->addrs_lock &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &ipvlan->addrs_lock &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &ipvlan->addrs_lock &n->list_lock &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock &____s->seqcount#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock &____s->seqcount irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex rcu_read_lock &ipvlan->addrs_lock &c->lock irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 
(wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex pcpu_lock stock_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &bridge_netdev_addr_lock_key/1 &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &wg->socket_update_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock nl_table_wait.lock &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex quarantine_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &obj_hash[i].lock pool_lock irq_context: 0 sched_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &cfs_rq->removed.lock irq_context: 0 tty_mutex &tty->legacy_mutex remove_cache_srcu irq_context: 0 &f->f_owner.lock irq_context: 0 &pipe->mutex/1 rcu_node_0 irq_context: 0 sb_writers#3 
jbd2_handle &pcp->lock &zone->lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6/1 &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#3/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock batched_entropy_u32.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock &tap_major->minor_lock irq_context: 0 rtnl_mutex rcu_read_lock &tap_major->minor_lock pool_lock#2 irq_context: 0 rtnl_mutex req_lock irq_context: 0 rtnl_mutex &x->wait#11 irq_context: 0 rtnl_mutex subsys mutex#75 irq_context: 0 rtnl_mutex subsys mutex#75 &k->k_lock irq_context: 0 rtnl_mutex &xa->xa_lock#4 pool_lock#2 irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex net_rwsem &rq->__lock irq_context: 0 rtnl_mutex net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &sem->wait_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &p->pi_lock irq_context: 0 kn->active#46 fs_reclaim irq_context: 0 kn->active#46 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock irq_context: 0 kn->active#47 fs_reclaim irq_context: 0 kn->active#47 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock nsim_bus_dev_ids.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &x->wait#9 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock bus_type_sem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock sysfs_symlink_target_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock dpm_list_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex 
kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex device_links_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex fwnode_link_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex device_links_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &dev->devres_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex pinctrl_list_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex pinctrl_maps_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex pinctrl_list_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex pinctrl_list_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex sysfs_symlink_target_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex devlinks.xa_lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem rtnl_mutex &br->hash_lock &meta->lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock rcu_node_0 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pcpu_alloc_mutex fs_reclaim &rq->__lock irq_context: 0 pcpu_alloc_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1024 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#407 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#407 &rq->__lock irq_context: 0 &kernfs_locks->open_file_mutex[count] krc.lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1069 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex pool_lock#2 irq_context: 0 tty_mutex &tty->legacy_mutex remove_cache_srcu quarantine_lock irq_context: 0 tty_mutex &tty->legacy_mutex remove_cache_srcu &c->lock irq_context: 0 tty_mutex &tty->legacy_mutex remove_cache_srcu &n->list_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &p->pi_lock irq_context: 0 tty_mutex &tty->legacy_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &tty->legacy_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
uevent_sock_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &rq->__lock irq_context: 0 rtnl_mutex remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex remove_cache_srcu fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 rtnl_mutex remove_cache_srcu fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex remove_cache_srcu fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &p->lock rcu_node_0 irq_context: 0 &p->lock &rcu_state.expedited_wq irq_context: 0 &p->lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tty->legacy_mutex &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock kfence_freelist_lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &n->list_lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 rtnl_mutex gdp_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &c->lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex &rq->__lock irq_context: 0 sk_lock-AF_INET &base->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sk_lock-AF_INET &base->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sk_lock-AF_INET &base->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sk_lock-AF_INET &base->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sk_lock-AF_INET &base->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &simple_offset_xa_lock &obj_hash[i].lock pool_lock irq_context: softirq rcu_read_lock &br->hash_lock &base->lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_event_queue_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 remove_cache_srcu stock_lock irq_context: 0 remove_cache_srcu pcpu_lock stock_lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pcpu_lock irq_context: 0 cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 pcpu_alloc_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 
&sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex.wait_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock &br->hash_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &pcp->lock &zone->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1024 &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 rtnl_mutex &wg->device_update_lock &wg->socket_update_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock (&timer.timer) irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1164 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 vlan_ioctl_mutex rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &____s->seqcount#2 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &simple_offset_xa_lock &n->list_lock irq_context: 0 &f->f_pos_lock rcu_node_0 irq_context: 0 &f->f_pos_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &c->lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &n->list_lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock pool_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 lock#4 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem stock_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)cfg80211 
(work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_lock_key#16 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex devnet_rename_sem remove_cache_srcu &obj_hash[i].lock irq_context: 0 &hdev->req_lock (wq_completion)hci5#4 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx key irq_context: 0 &mm->mmap_lock sb_writers#5 &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx pcpu_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx percpu_counters_lock irq_context: 0 &type->i_mutex_dir_key#2 &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx pcpu_lock stock_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_lock_key#16 &dentry->d_lock irq_context: 0 &resv_map->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &cfs_rq->removed.lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &u->iolock &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 rcu_state.barrier_mutex &rq->__lock cpu_asid_lock irq_context: 0 pernet_ops_rwsem fs_reclaim &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock &wg->socket_update_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem fill_pool_map-wait-type-override pool_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER &n->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex dpm_list_mtx &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex 
fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1025 irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem fs_reclaim &rq->__lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &base->lock irq_context: 0 &mm->mmap_lock &mm->page_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex __ip_vs_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex __ip_vs_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1027 irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock irq_context: 0 
(wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &bat_priv->forw_bat_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 cb_lock fs_reclaim &rq->__lock irq_context: 0 &root->kernfs_iattr_rwsem rcu_node_0 irq_context: 0 &root->kernfs_iattr_rwsem &rcu_state.expedited_wq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex.wait_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 rcu_read_lock &rq->__lock irq_context: 0 kn->active#46 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock nsim_bus_dev_list_lock.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex gdp_mutex &c->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx batched_entropy_u8.lock crngs.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &pcp->lock &zone->lock irq_context: 0 &vma->vm_lock->lock &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex deferred_probe_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock 
&obj_hash[i].lock irq_context: 0 &q->sysfs_dir_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 key irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex irq_context: 0 &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex fs_reclaim irq_context: 0 &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex pool_lock#2 irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER &c->lock irq_context: 0 tty_mutex devpts_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &tty->legacy_mutex/1 &tty->ldisc_sem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &tty->legacy_mutex/1 &tty->files_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 &tty->legacy_mutex/1 &f->f_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &tty->legacy_mutex/1 &f->f_lock fasync_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 nf_sockopt_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 key#24 irq_context: 0 rtnl_mutex sk_lock-AF_INET rcu_read_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 &f->f_pos_lock 
&type->i_mutex_dir_key#3 &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex probe_waitqueue.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock subsys mutex#76 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex devnet_rename_sem &____s->seqcount irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &obj_hash[i].lock irq_context: 0 kn->active#47 &c->lock irq_context: 0 kn->active#47 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex devlinks.xa_lock pool_lock#2 irq_context: softirq &(&conn->info_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&conn->info_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[1] &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle bit_wait_table + i irq_context: 0 &data->open_mutex &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_node_0 irq_context: 0 &fsnotify_mark_srcu fs_reclaim &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &mm->mmap_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 &tty->legacy_mutex/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem 
fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &sem->wait_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex devnet_rename_sem &sem->wait_lock irq_context: 0 rtnl_mutex devnet_rename_sem &p->pi_lock irq_context: 0 rtnl_mutex devnet_rename_sem &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) irq_context: 0 rtnl_mutex &devlink_port->type_lock irq_context: 0 rtnl_mutex wq_pool_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &sem->wait_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &p->pi_lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock xps_map_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&hwstats->traffic_dw)->timer irq_context: softirq &(&hwstats->traffic_dw)->timer rcu_read_lock &pool->lock irq_context: softirq &(&hwstats->traffic_dw)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&hwstats->traffic_dw)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&hwstats->traffic_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&hwstats->traffic_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &hwstats->hwsdev_list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex sysctl_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex sysctl_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex sysctl_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &data->fib_event_queue_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem &root->kernfs_rwsem &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nfnl_subsys_ipset &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle percpu_counters_lock irq_context: 0 &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &base->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&pmctx->ip4_mc_router_timer) irq_context: softirq &(&bat_priv->bla.work)->timer 
rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&bat_priv->bla.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&bat_priv->bla.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&ht->run_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex deferred_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock lweventlist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock lweventlist_lock &dir->lock#2 irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&mp->timer) &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &zone->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &n->list_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 
&n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock stock_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&br->gc_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex j1939_netdev_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &qdisc_xmit_lock_key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &qdisc_xmit_lock_key#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &vlan_netdev_xmit_lock_key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &batadv_netdev_xmit_lock_key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &qdisc_xmit_lock_key#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &qdisc_xmit_lock_key#4 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_LOOPBACK#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 &sch->q.lock irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-sk_lock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-slock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &table->hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &table->hash[i].lock &table->hash2[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-clock-AF_INET6 irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_lock_key#8 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &xa->xa_lock#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &fsnotify_mark_srcu irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-sk_lock-AF_INET irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-sk_lock-AF_INET k-slock-AF_INET#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-slock-AF_INET#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-clock-AF_INET irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER (console_sem).lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner &port_lock_key irq_context: 0 cb_lock genl_mutex rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER krc.lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &list->lock#14 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock (&peer->timer_retransmit_handshake) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock (&peer->timer_send_keepalive) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock (&peer->timer_new_handshake) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock (&peer->timer_zero_key_material) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock (&peer->timer_persistent_keepalive) irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock pcpu_alloc_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock pcpu_alloc_mutex pcpu_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock tk_core.seq.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &table->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &peer->endpoint_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex.wait_lock irq_context: 0 cb_lock genl_mutex &p->pi_lock irq_context: 0 cb_lock genl_mutex &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex 
&p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock (work_completion)(&peer->clear_peer_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &handshake->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &r->consumer_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->socket_update_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex &xa->xa_lock#4 &c->lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex pin_fs_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rcu_state.expedited_wq irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &nn->netlink_tap_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock 
&data->fib_event_queue_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rcu_node_0 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex &wq->mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock batched_entropy_u8.lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER krc.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&bond->mii_work)->work) irq_context: 0 rtnl_mutex rcu_read_lock &data->fib_event_queue_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&bond->arp_work)->work) irq_context: 0 rtnl_mutex &ndev->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &xt[i].mutex remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &____s->seqcount#2 irq_context: 0 sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 sb_writers#3 jbd2_handle 
&journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &sem->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem pool_lock#2 irq_context: softirq (&pool->idle_timer) rcu_read_lock &pool->lock irq_context: softirq (&pool->idle_timer) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&pool->idle_timer) rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&pool->idle_timer) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &pool->lock &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &pool->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) wq_pool_attach_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock &c->lock irq_context: 0 &f->f_pos_lock sb_writers#4 &rq->__lock irq_context: 0 &f->f_pos_lock sb_writers#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &rq->__lock &cfs_rq->removed.lock irq_context: 0 &net->packet.sklist_lock &rq->__lock irq_context: 0 &tty->legacy_mutex/1 pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#488 irq_context: 0 sb_writers#5 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#509 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#488 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 _xmit_ETHER krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex 
&dev_addr_list_lock_key/1 _xmit_ETHER krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 _xmit_ETHER &obj_hash[i].lock pool_lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&app->join_timer)#2 batched_entropy_u32.lock crngs.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#488 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#488 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#482 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_validator_chain).rwsem &rq->__lock irq_context: softirq &(&hdev->cmd_timer)->timer irq_context: softirq &(&hdev->cmd_timer)->timer rcu_read_lock &pool->lock irq_context: softirq &(&hdev->cmd_timer)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&hdev->cmd_timer)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&bond->alb_work)->work) irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&bond->ad_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&bond->mcast_work)->work) irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&bond->slave_arr_work)->work) irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &c->lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#480 irq_context: 0 (wq_completion)rcu_gp &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp &obj_hash[i].lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem remove_cache_srcu irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex devnet_rename_sem &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 _xmit_ETHER krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&br->hello_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&br->topology_change_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&br->tcn_timer) irq_context: softirq (&app->join_timer) &app->lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu &____s->seqcount irq_context: 0 rtnl_mutex remove_cache_srcu &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1164 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1164 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1164 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1164 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1164 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pmc->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events reg_work &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events reg_work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &base->lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount#2 
irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1164 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1164 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1165 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#153 irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &simple_offset_xa_lock &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 pcpu_lock stock_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &net->packet.sklist_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock stock_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock key irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#20 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#20 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#153 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#150 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#148 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1165 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1165 irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &rq->__lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#1165 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1166 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1166 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1166 irq_context: 0 &ep->mtx stock_lock irq_context: 0 &ep->mtx key irq_context: 0 &ep->mtx pcpu_lock irq_context: 0 &ep->mtx percpu_counters_lock irq_context: 0 &ep->mtx pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1166 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#473 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#471 irq_context: 0 &data->open_mutex triggers_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1151 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1151 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1151 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1152 irq_context: 0 sb_writers#7 kfence_freelist_lock irq_context: 0 sb_writers#7 &meta->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1542 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET pcpu_lock stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET6/1 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 
&tty->legacy_mutex devpts_mutex cdev_lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem remove_cache_srcu quarantine_lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem remove_cache_srcu &c->lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem remove_cache_srcu &n->list_lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem remove_cache_srcu &obj_hash[i].lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx nfc_index_ida.xa_lock &c->lock irq_context: 0 misc_mtx nfc_index_ida.xa_lock pool_lock#2 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &type->i_mutex_dir_key/1 &c->lock irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock irq_context: 0 lock kernfs_idr_lock &n->list_lock irq_context: 0 lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) &ndev->lock batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 
(wq_completion)events_unbound deferred_probe_work deferred_probe_mutex rcu_node_0 irq_context: 0 &xt[i].mutex &mm->mmap_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &____s->seqcount irq_context: 0 rtnl_mutex dev_addr_sem team->team_lock_key#72 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &____s->seqcount irq_context: 0 (wq_completion)events_unbound deferred_probe_work deferred_probe_mutex &rq->__lock irq_context: 0 (wq_completion)events (debug_obj_work).work &meta->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 krc.lock &obj_hash[i].lock irq_context: 0 tty_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rq->__lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound deferred_probe_work deferred_probe_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex _xmit_SIT irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock irq_context: 0 rtnl_mutex &ndev->lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &ndev->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &ndev->lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq &(&hdev->cmd_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&hdev->cmd_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex 
rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tty_mutex &tty->legacy_mutex rcu_node_0 irq_context: 0 tty_mutex &tty->legacy_mutex &rcu_state.expedited_wq irq_context: 0 tty_mutex &tty->legacy_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &____s->seqcount irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &c->lock irq_context: 0 &x->wait#27 irq_context: 0 cb_lock remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 cb_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx console_lock console_srcu console_owner &port_lock_key irq_context: 0 misc_mtx console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#509 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex fill_pool_map-wait-type-override pool_lock irq_context: softirq &(&hdev->cmd_timer)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#503 irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 irq_context: 0 rtnl_mutex dev_addr_sem &br->lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock 
&br->hash_lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock rcu_read_lock rhashtable_bucket irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock nl_table_lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock nl_table_wait.lock irq_context: 0 rtnl_mutex &bridge_netdev_addr_lock_key/1 irq_context: 0 rtnl_mutex &br->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->lock &base->lock irq_context: 0 rtnl_mutex &br->lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &bridge_netdev_addr_lock_key/1 pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 pool_lock#2 irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq (&brmctx->ip6_own_query.timer) irq_context: softirq (&brmctx->ip6_own_query.timer) &br->multicast_lock irq_context: softirq (&brmctx->ip4_own_query.timer) irq_context: softirq (&brmctx->ip4_own_query.timer) &br->multicast_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&in_dev->mr_ifc_timer) irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock &dir->lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock &ul->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &____s->seqcount#7 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &nf_conntrack_locks[i] irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock crngs.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &obj_hash[i].lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock 
&dir->lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &tbl->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &n->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &____s->seqcount#8 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) batched_entropy_u32.lock irq_context: softirq (&in_dev->mr_ifc_timer) &obj_hash[i].lock irq_context: softirq (&in_dev->mr_ifc_timer) &base->lock irq_context: softirq (&in_dev->mr_ifc_timer) &base->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex genl_mutex.wait_lock irq_context: 0 cb_lock genl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock lock kernfs_idr_lock pool_lock#2 irq_context: 0 rlock-AF_INET irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#432 irq_context: 0 crtc_ww_class_mutex drm_connector_list_iter &dev->mode_config.connector_list_lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rcu_state.gp_wq &p->pi_lock &rq->__lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1069 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#950 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1039 irq_context: 0 &mdev->req_queue_mutex &dev->mutex#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &meta->lock irq_context: 0 cb_lock genl_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)gid-cache-wq &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#501 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1060 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex rcu_state.exp_wake_mutex.wait_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex.wait_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &p->pi_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq (&app->periodic_timer) irq_context: softirq (&app->periodic_timer) &app->lock irq_context: softirq (&app->periodic_timer) &app->lock &obj_hash[i].lock irq_context: softirq (&app->periodic_timer) &app->lock &base->lock irq_context: softirq (&app->periodic_timer) &app->lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock pool_lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &wb->work_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_state.barrier_mutex &obj_hash[i].lock irq_context: 0 rcu_state.barrier_mutex 
pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_node_0 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem stock_lock irq_context: 0 pernet_ops_rwsem nf_hook_mutex &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock key#23 irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock nl_table_wait.lock &p->pi_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &base->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rcu_state.expedited_wq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &ndev->lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex kfence_freelist_lock irq_context: 0 rtnl_mutex dev_addr_sem nl_table_wait.lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET6/1 key#25 irq_context: 0 (wq_completion)events 
(work_completion)(&rfkill_global_led_trigger_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_node_0 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bond0#67 irq_context: 0 (wq_completion)bond0#67 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock kfence_freelist_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock remove_cache_srcu irq_context: 0 (wq_completion)bond0#67 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#67 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#67 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1621 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1621 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1621 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1621 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1621 &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 qrtr_node_lock &rq->__lock irq_context: 0 qrtr_node_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1621 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fs_reclaim &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#794 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#794 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#784 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1624 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#784 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#784 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1624 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1624 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#780 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1624 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1622 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1622 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1622 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1622 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1622 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events 
drain_vmap_work vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &base->lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1622 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1622 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1625 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1625 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1623 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1623 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#795 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#795 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#795 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#795 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#785 irq_context: 0 crtc_ww_class_mutex &file->master_lookup_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#781 irq_context: 0 rtnl_mutex dev_addr_sem team->team_lock_key#84 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock &n->list_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1626 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1626 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1626 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->master_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1147 irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_node_0 irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &p->pi_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &p->pi_lock &rq->__lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock stock_lock irq_context: 0 __ip_vs_mutex __ip_vs_mutex.wait_lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &____s->seqcount#2 irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq irq_context: 0 sk_lock-AF_INET key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &rq->__lock irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu pool_lock#2 irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &____s->seqcount#2 irq_context: 0 __ip_vs_mutex.wait_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock key irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock pcpu_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &c->lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &____s->seqcount irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock percpu_counters_lock irq_context: softirq &(&br->gc_work)->timer irq_context: softirq &(&br->gc_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&br->gc_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&br->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&br->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&br->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_long (work_completion)(&(&br->gc_work)->work) irq_context: 0 (wq_completion)events_long (work_completion)(&(&br->gc_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&br->gc_work)->work) &base->lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&br->gc_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock pcpu_lock stock_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1027 &rq->__lock irq_context: 0 rtnl_mutex _xmit_TUNNEL irq_context: 0 rtnl_mutex dev_addr_sem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1027 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock console_owner_lock irq_context: 0 &lruvec->lru_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 
&of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 cb_lock console_owner irq_context: 0 (wq_completion)nfc3_nci_tx_wq#472 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1028 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1029 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1029 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip4_mc_router_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip4_other_query.timer) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 pin_fs_lock irq_context: 0 rtnl_mutex _xmit_IPGRE irq_context: 0 rtnl_mutex &ndev->lock &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 ebt_mutex &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 ebt_mutex &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cgroup_threadgroup_rwsem stock_lock irq_context: 0 cgroup_threadgroup_rwsem 
pcpu_lock stock_lock irq_context: 0 &tsk->futex_exit_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip4_other_query.delay_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip4_own_query.timer) irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &hsr->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem net_rwsem &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &____s->seqcount irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &c->lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu irq_context: 0 &p->lock remove_cache_srcu &meta->lock irq_context: 0 &p->lock remove_cache_srcu kfence_freelist_lock irq_context: 0 sk_lock-AF_INET pcpu_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock 
batched_entropy_u8.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &____s->seqcount#2 irq_context: softirq (&hsr->announce_timer) rcu_read_lock batched_entropy_u8.lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock kfence_freelist_lock irq_context: 0 rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock genl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rt6_exception_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &pcp->lock &zone->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex _xmit_IPGRE &c->lock irq_context: 0 rtnl_mutex _xmit_IPGRE &____s->seqcount irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock &c->lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock &n->list_lock irq_context: 0 rtnl_mutex &tb->tb6_lock rcu_read_lock &n->list_lock &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock &____s->seqcount irq_context: 0 rtnl_mutex _xmit_TUNNEL6 irq_context: 0 rtnl_mutex &ndev->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &ndev->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &tb->tb6_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock &meta->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock kfence_freelist_lock irq_context: 0 rtnl_mutex dev_addr_sem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock batched_entropy_u32.lock crngs.lock irq_context: 0 rtnl_mutex &ndev->lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &ndev->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex 
&tb->tb6_lock quarantine_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock pool_lock#2 irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock crngs.lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock &base->lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &bond->stats_lock/1 irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &meta->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock kfence_freelist_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#406 irq_context: 0 rtnl_mutex devnet_rename_sem &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &c->lock irq_context: 0 &pipe->mutex/1 &pipe->wr_wait &ep->lock irq_context: 0 (wq_completion)events free_ipc_work sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1030 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1032 irq_context: 0 rtnl_mutex devnet_rename_sem &pcp->lock &zone->lock irq_context: softirq rcu_callback stock_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 rfkill_global_mutex rfkill_global_mutex.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock batched_entropy_u8.lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock kfence_freelist_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1147 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1147 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1148 irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tty->legacy_mutex devpts_mutex &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &p->pi_lock irq_context: 0 drm_connector_list_iter rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#477 irq_context: 0 rtnl_mutex &ul->lock#2 &c->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 quarantine_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock pool_lock#2 irq_context: 0 &dev->mutex rfkill_global_mutex &cfs_rq->removed.lock irq_context: 0 &dev->mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#477 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 &lo->lo_mutex &rq->__lock irq_context: 0 elock-AF_INET irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) rcu_node_0 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 key irq_context: 0 sb_writers#3 pcpu_lock irq_context: 0 sb_writers#3 percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &meta->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock kfence_freelist_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &____s->seqcount irq_context: 0 &lo->lo_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
&ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1152 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock batched_entropy_u8.lock crngs.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &dentry->d_lock &wq#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &xa->xa_lock#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &fs->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &fs->lock &dentry->d_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &dentry->d_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &dentry->d_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock pool_lock#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &dentry->d_lock 
pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &c->lock irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock &wq#2 irq_context: 0 rtnl_mutex fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &s->s_inode_list_lock irq_context: 0 rtnl_mutex fs_reclaim &cfs_rq->removed.lock irq_context: 0 rtnl_mutex fs_reclaim &obj_hash[i].lock irq_context: 0 rtnl_mutex fs_reclaim pool_lock#2 irq_context: softirq (&app->join_timer) &app->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&app->join_timer) &app->lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock &n->list_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock &n->list_lock &c->lock irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pcpu_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock 
rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &n->list_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER &____s->seqcount irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 tk_core.seq.seqcount irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem quarantine_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex (inet6addr_validator_chain).rwsem irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->ipv6.addrconf_hash_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock pool_lock#2 irq_context: 0 
(wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ifa->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock crngs.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq &list->lock#12 irq_context: softirq &(&hwstats->traffic_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &____s->seqcount irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) 
rcu_read_lock &____s->seqcount#9 irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER pool_lock#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock 
fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex dev_addr_sem &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock deferred_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events 
(linkwatch_work).work rtnl_mutex &br->lock (console_sem).lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock console_lock console_srcu console_owner irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &br->multicast_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &br->multicast_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &br->multicast_lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock lweventlist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: softirq (&pmctx->ip6_own_query.timer) irq_context: softirq (&pmctx->ip6_own_query.timer) &br->multicast_lock irq_context: softirq (&pmctx->ip4_own_query.timer) irq_context: softirq (&pmctx->ip4_own_query.timer) &br->multicast_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 rtnl_mutex dev_addr_sem &tn->lock &rq->__lock irq_context: 0 (wq_completion)events deferred_process_work &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 quarantine_lock irq_context: softirq (&peer->timer_persistent_keepalive) init_task.mems_allowed_seq.seqcount irq_context: 0 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex &wg->device_update_lock &rnp->exp_lock irq_context: softirq rcu_read_lock &br->hash_lock irq_context: softirq rcu_read_lock &br->hash_lock pool_lock#2 irq_context: softirq rcu_read_lock &br->hash_lock rcu_read_lock rhashtable_bucket irq_context: softirq rcu_read_lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq rcu_read_lock &br->hash_lock nl_table_lock irq_context: softirq rcu_read_lock &br->hash_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &br->hash_lock nl_table_wait.lock irq_context: softirq rcu_read_lock &br->multicast_lock irq_context: softirq rcu_read_lock &br->multicast_lock pool_lock#2 irq_context: softirq rcu_read_lock &br->multicast_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &br->multicast_lock rcu_read_lock rhashtable_bucket irq_context: softirq rcu_read_lock &br->multicast_lock &obj_hash[i].lock pool_lock irq_context: softirq rcu_read_lock &br->multicast_lock &dir->lock#2 irq_context: softirq rcu_read_lock &br->multicast_lock deferred_lock irq_context: softirq rcu_read_lock &br->multicast_lock rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock &br->multicast_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 
softirq rcu_read_lock &br->multicast_lock nl_table_lock irq_context: softirq rcu_read_lock &br->multicast_lock nl_table_wait.lock irq_context: softirq rcu_read_lock &br->multicast_lock &base->lock irq_context: softirq rcu_read_lock &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_read_lock &br->multicast_lock &c->lock irq_context: softirq rcu_read_lock &br->multicast_lock &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key#2/1 &n->list_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock rcu_read_lock rhashtable_bucket 
irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock deferred_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock nl_table_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock nl_table_wait.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &base->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &pcp->lock &zone->lock 
irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock quarantine_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &x->wait#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &n->list_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &tb->tb6_lock &n->list_lock irq_context: 0 rtnl_mutex &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &____s->seqcount irq_context: softirq rcu_read_lock &br->hash_lock &c->lock irq_context: softirq rcu_read_lock &br->hash_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex _xmit_ETHER/1 irq_context: 0 rtnl_mutex _xmit_ETHER/1 _xmit_ETHER irq_context: 0 (wq_completion)events deferred_process_work &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount irq_context: 0 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override &n->list_lock irq_context: 0 tomoyo_ss remove_cache_srcu fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#471 irq_context: 0 crtc_ww_class_acquire &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &pcp->lock &zone->lock irq_context: 0 rtnl_mutex dev_addr_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &____s->seqcount irq_context: 0 &mm->mmap_lock sb_writers#3 &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock sb_writers#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock sb_writers#3 &____s->seqcount irq_context: 0 rtnl_mutex &ndev->lock &n->list_lock irq_context: 0 rtnl_mutex &ndev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dentry->d_lock &obj_hash[i].lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: softirq rcu_read_lock quarantine_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &hsr->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &new_node->seq_out_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pcp->lock &zone->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock key irq_context: 0 &mm->mmap_lock rcu_read_lock pcpu_lock irq_context: 0 &mm->mmap_lock rcu_read_lock percpu_counters_lock irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: softirq (&hsr->announce_timer) irq_context: softirq (&hsr->announce_timer) rcu_read_lock pool_lock#2 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &new_node->seq_out_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock pool_lock#2 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock &obj_hash[i].lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock pool_lock#2 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &obj_hash[i].lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &base->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock krc.lock &base->lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex _xmit_ETHER/1 &c->lock irq_context: 0 rtnl_mutex _xmit_ETHER/1 &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &ndev->lock irq_context: softirq rcu_read_lock &br->multicast_lock &n->list_lock irq_context: softirq rcu_read_lock &br->multicast_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex key irq_context: 0 rtnl_mutex percpu_counters_lock 
irq_context: 0 rtnl_mutex &nn->netlink_tap_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 rcu_read_lock rcu_read_lock_bh &pcp->lock &zone->lock irq_context: 0 rcu_read_lock rcu_read_lock_bh &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rcu_read_lock rcu_read_lock_bh &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex mount_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex quarantine_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &n->list_lock &c->lock irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq drivers/net/wireguard/ratelimiter.c:20 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &meta->lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 _xmit_ETHER &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 _xmit_ETHER &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &new_node->seq_out_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock 
rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock pool_lock#2 irq_context: 0 sb_writers#3 &n->list_lock irq_context: 0 sb_writers#3 &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &n->list_lock irq_context: 0 rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &c->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex j1939_netdev_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &bat_priv->tt.changes_list_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &bat_priv->tt.changes_list_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &bat_priv->tt.changes_list_lock pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock key#16 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &batadv_netdev_addr_lock_key/1 irq_context: 0 &mm->mmap_lock &folio_wait_table[i] irq_context: 0 &mm->mmap_lock &folio_wait_table[i] &p->pi_lock irq_context: 0 &mm->mmap_lock &folio_wait_table[i] &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &folio_wait_table[i] &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq rcu_read_lock &br->multicast_lock &pcp->lock &zone->lock irq_context: softirq rcu_read_lock &br->multicast_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem key irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem percpu_counters_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &rq->__lock &cfs_rq->removed.lock 
irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &vlan_netdev_addr_lock_key/1 irq_context: 0 rtnl_mutex &vlan_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 rtnl_mutex &vlan_netdev_addr_lock_key/1 pool_lock#2 irq_context: 0 rtnl_mutex &vlan_netdev_addr_lock_key/1 &c->lock irq_context: 0 rtnl_mutex &vlan_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER (console_sem).lock irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER console_lock console_srcu console_owner irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 irq_context: 0 rtnl_mutex _xmit_ETHER (console_sem).lock irq_context: 0 rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner irq_context: 0 rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex _xmit_ETHER console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq rcu_read_lock &list->lock#13 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem quarantine_lock irq_context: 0 rtnl_mutex 
(inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &list->lock#13 irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) pool_lock#2 irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) quarantine_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &ipvlan->addrs_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &ipvlan->addrs_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &ipvlan->addrs_lock irq_context: 0 rtnl_mutex rcu_read_lock &ipvlan->addrs_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 irq_context: 0 &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh pool_lock#2 irq_context: 0 &mm->mmap_lock rcu_read_lock key irq_context: 0 &mm->mmap_lock rcu_read_lock pcpu_lock irq_context: 0 &mm->mmap_lock rcu_read_lock percpu_counters_lock irq_context: 0 (wq_completion)events (linkwatch_work).work &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 _xmit_ETHER irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock 
&net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem &hard_iface->bat_iv.ogm_buff_mutex irq_context: 0 rtnl_mutex key#20 irq_context: 0 rtnl_mutex &bat_priv->tt.commit_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 &sb->s_type->i_mutex_key#10 quarantine_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq (&hsr->announce_timer) rcu_read_lock &c->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &____s->seqcount irq_context: 0 rtnl_mutex mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &sb->s_type->i_lock_key#8 irq_context: 0 rtnl_mutex &dir->lock irq_context: 0 rtnl_mutex k-slock-AF_INET/1 irq_context: 0 rtnl_mutex k-sk_lock-AF_INET irq_context: 0 rtnl_mutex k-sk_lock-AF_INET k-slock-AF_INET#2 irq_context: 0 rtnl_mutex k-sk_lock-AF_INET &table->hash[i].lock irq_context: 0 rtnl_mutex k-sk_lock-AF_INET &table->hash[i].lock k-clock-AF_INET irq_context: 0 rtnl_mutex k-sk_lock-AF_INET &table->hash[i].lock &table->hash2[i].lock irq_context: 0 rtnl_mutex k-slock-AF_INET#2 irq_context: 0 rtnl_mutex rcu_read_lock (console_sem).lock irq_context: 0 rtnl_mutex rcu_read_lock console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex rcu_read_lock console_lock console_srcu console_owner irq_context: 0 rtnl_mutex rcu_read_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex rcu_read_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &n->list_lock irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex dev_addr_sem _xmit_ETHER &____s->seqcount irq_context: 0 rtnl_mutex k-sk_lock-AF_INET6 irq_context: 0 rtnl_mutex k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 rtnl_mutex k-slock-AF_INET6 irq_context: 0 rtnl_mutex k-sk_lock-AF_INET6 &table->hash[i].lock irq_context: 0 rtnl_mutex k-sk_lock-AF_INET6 &table->hash[i].lock k-clock-AF_INET6 irq_context: 0 rtnl_mutex k-sk_lock-AF_INET6 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock irq_context: 0 rtnl_mutex &wg->device_update_lock fs_reclaim irq_context: 0 rtnl_mutex &wg->device_update_lock fs_reclaim mmu_notifier_invalidate_range_start 
irq_context: 0 rtnl_mutex &wg->device_update_lock pool_lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex &wg->device_update_lock &sb->s_type->i_lock_key#8 irq_context: 0 rtnl_mutex &wg->device_update_lock &____s->seqcount irq_context: 0 rtnl_mutex &wg->device_update_lock &dir->lock irq_context: 0 rtnl_mutex &wg->device_update_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-slock-AF_INET/1 irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET k-slock-AF_INET#2 irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET &table->hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET &table->hash[i].lock k-clock-AF_INET irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET &table->hash[i].lock &table->hash2[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-slock-AF_INET#2 irq_context: 0 rtnl_mutex &wg->device_update_lock cpu_hotplug_lock irq_context: 0 rtnl_mutex &wg->device_update_lock cpu_hotplug_lock jump_label_mutex irq_context: 0 rtnl_mutex &wg->device_update_lock cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 rtnl_mutex &wg->device_update_lock k-slock-AF_INET6 irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 &table->hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 &table->hash[i].lock k-clock-AF_INET6 irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock &wg->socket_update_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 rtnl_mutex &wg->device_update_lock &list->lock#14 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh 
rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock &c->lock irq_context: 0 crtc_ww_class_acquire &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex quarantine_lock irq_context: 0 sk_lock-AF_INET fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 crtc_ww_class_acquire &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem remove_cache_srcu pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &ct->lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem remove_cache_srcu &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 stock_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 rtnl_mutex &root->kernfs_rwsem quarantine_lock irq_context: 0 &list->lock#21 irq_context: 0 &tun->readq irq_context: 0 &node->ep_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER &obj_hash[i].lock pool_lock irq_context: 0 qrtr_nodes_lock irq_context: 0 &node->qrtr_tx_lock irq_context: 0 qrtr_node_lock irq_context: 0 qrtr_node_lock qrtr_nodes_lock irq_context: 0 sk_lock-AF_INET clock-AF_INET irq_context: 0 sk_lock-AF_INET &sctp_port_hashtable[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1162 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1162 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1162 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 
rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1163 irq_context: 0 rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1167 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1146 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 batched_entropy_u32.lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock &____s->seqcount#9 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#14 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 rtnl_mutex dev_addr_sem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh key#20 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &entry->crc_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1146 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1146 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock stock_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock key irq_context: 0 sk_lock-AF_INET rcu_read_lock pcpu_lock 
irq_context: 0 sk_lock-AF_INET &sctp_port_hashtable[i].lock &____s->seqcount#2 irq_context: 0 sk_lock-AF_INET &sctp_port_hashtable[i].lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: 0 sk_lock-AF_INET &sctp_port_hashtable[i].lock &c->lock irq_context: 0 sk_lock-AF_INET &sctp_port_hashtable[i].lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 krc.lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 &sctp_port_hashtable[i].lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &n->list_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 &sctp_port_hashtable[i].lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 &sctp_port_hashtable[i].lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock (worker)->lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start stock_lock irq_context: softirq rcu_read_lock rcu_read_lock &r->producer_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start key irq_context: 0 sk_lock-AF_INET rcu_read_lock percpu_counters_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1031 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rq->__lock &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &pcp->lock &zone->lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex rcu_node_0 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &rcu_state.expedited_wq irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &c->lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &pcp->lock &zone->lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock 
k-slock-AF_INET#2 &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 &obj_hash[i].lock pool_lock irq_context: softirq init_task.mems_allowed_seq.seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->lock lweventlist_lock &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1039 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#405 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#405 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1034 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1034 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1251 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock &n->list_lock irq_context: 0 rtnl_mutex uevent_sock_mutex fs_reclaim &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 &pipe->mutex/1 &pipe->wr_wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &pipe->wr_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 &n->list_lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 &n->list_lock &c->lock irq_context: 0 uevent_sock_mutex remove_cache_srcu irq_context: 0 uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 &data->open_mutex remove_cache_srcu irq_context: 0 &data->open_mutex remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex rfkill_global_mutex dpm_list_mtx &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock &n->list_lock &c->lock irq_context: 0 sk_lock-AF_INET &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem 
pcpu_alloc_mutex rcu_node_0 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1196 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &rq->__lock irq_context: 0 tty_mutex &tty->legacy_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET sched_map-wait-type-override &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1038 irq_context: 0 rtnl_mutex deferred_probe_mutex &rq->__lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sk_lock-AF_INET percpu_counters_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock &pool->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex pool_lock#2 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex deferred_probe_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#905 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#905 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#905 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#905 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#905 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#907 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#906 irq_context: 0 &xt[i].mutex remove_cache_srcu rcu_node_0 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh &r->producer_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 
&rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &wb->work_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &wb->work_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#906 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#906 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &keypair->receiving_counter.lock irq_context: softirq &peer->keypairs.keypair_update_lock irq_context: softirq &list->lock#14 irq_context: softirq rcu_read_lock_bh &r->producer_lock#2 irq_context: softirq rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock remove_cache_srcu &base->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#906 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#906 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#908 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#907 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#907 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#907 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#907 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#347 irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock quarantine_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: softirq &(&conn->disc_work)->timer irq_context: softirq &(&conn->disc_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&conn->disc_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&conn->disc_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &____s->seqcount irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 &c->lock irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock 
&rcu_state.gp_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &ul->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1060 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#347 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#342 irq_context: 0 tty_mutex &tty->legacy_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &meta->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock kfence_freelist_lock irq_context: 0 &fsnotify_mark_srcu stock_lock irq_context: 0 &fsnotify_mark_srcu pcpu_lock stock_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex fs_reclaim &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &base->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#342 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem bit_wait_table + i irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &____s->seqcount#2 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#342 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#904 irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_state_lock irq_context: 0 sb_internal &journal->j_state_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &c->lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex kfence_freelist_lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &meta->lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &rq->__lock irq_context: 0 &data->open_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rq->__lock &base->lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &rq->__lock cpu_asid_lock irq_context: 0 
(wq_completion)events (work_completion)(&w->work)#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 &data->open_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex uevent_sock_mutex &rq->__lock irq_context: 0 &data->open_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 misc_mtx &dev->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire batched_entropy_u8.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#904 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#904 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&conn->disc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#904 irq_context: softirq &(&conn->disc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#904 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#906 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#340 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire kfence_freelist_lock irq_context: 0 sb_internal &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_internal &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_internal &journal->j_state_lock &base->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &meta->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#909 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex kfence_freelist_lock irq_context: 0 sb_internal &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#908 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#908 irq_context: 0 rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock &mapping->i_private_lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock &ret->b_state_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#908 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#910 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#910 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#910 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#909 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#909 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#909 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex smc_ib_devices.mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex 
smc_ib_devices.mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock pool_lock#2 irq_context: 0 &f->f_pos_lock sb_writers#4 batched_entropy_u8.lock irq_context: 0 &f->f_pos_lock pool_lock#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &base->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#911 irq_context: 0 rcu_state.exp_mutex key irq_context: 0 misc_mtx batched_entropy_u8.lock crngs.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#348 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#348 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#343 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#343 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#343 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#341 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET &pnsocks.lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET &pnsocks.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock krc.lock &base->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire &pcp->lock &zone->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 &obj_hash[i].lock pool_lock irq_context: 0 cb_lock remove_cache_srcu &meta->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock remove_cache_srcu kfence_freelist_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#341 &rq->__lock irq_context: softirq rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: softirq rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx rfkill_global_mutex irq_context: 0 misc_mtx rfkill_global_mutex fs_reclaim irq_context: 0 misc_mtx rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx rfkill_global_mutex pool_lock#2 irq_context: 0 misc_mtx rfkill_global_mutex &rfkill->lock irq_context: 0 misc_mtx rfkill_global_mutex &data->mtx irq_context: 0 cb_lock genl_mutex rcu_read_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 cb_lock rcu_read_lock pool_lock#2 irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 cb_lock genl_mutex hwsim_radio_lock irq_context: 0 cb_lock genl_mutex &x->wait#9 irq_context: 0 cb_lock genl_mutex batched_entropy_u32.lock irq_context: 0 
cb_lock genl_mutex &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex &k->list_lock irq_context: 0 cb_lock genl_mutex gdp_mutex irq_context: 0 cb_lock genl_mutex gdp_mutex &k->list_lock irq_context: 0 cb_lock genl_mutex lock irq_context: 0 cb_lock genl_mutex lock kernfs_idr_lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 cb_lock genl_mutex bus_type_sem irq_context: 0 cb_lock genl_mutex sysfs_symlink_target_lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem irq_context: 0 cb_lock genl_mutex &dev->power.lock irq_context: 0 cb_lock genl_mutex dpm_list_mtx irq_context: 0 cb_lock genl_mutex uevent_sock_mutex irq_context: 0 cb_lock genl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &c->lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 cb_lock genl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex subsys mutex#53 irq_context: 0 cb_lock genl_mutex subsys mutex#53 &k->k_lock irq_context: 0 cb_lock genl_mutex lock kernfs_idr_lock &c->lock irq_context: 0 cb_lock genl_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex device_links_lock irq_context: 0 cb_lock genl_mutex &k->k_lock irq_context: 0 cb_lock genl_mutex deferred_probe_mutex irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex pcpu_alloc_mutex irq_context: 0 cb_lock genl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex 
cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex wq_pool_mutex irq_context: 0 cb_lock genl_mutex wq_pool_mutex &wq->mutex irq_context: 0 cb_lock genl_mutex crngs.lock irq_context: 0 cb_lock genl_mutex triggers_list_lock irq_context: 0 cb_lock genl_mutex leds_list_lock irq_context: 0 cb_lock genl_mutex &zone->lock irq_context: 0 cb_lock genl_mutex &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc3_nci_tx_wq#341 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#910 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 &c->lock irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#910 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#910 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#910 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex lock kernfs_idr_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &ul->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 
&sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#970 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex param_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex param_lock rate_ctrl_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex (console_sem).lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &k->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &k->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock kernfs_idr_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex kobj_ns_type_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx bus_type_sem irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx sysfs_symlink_target_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &dev->power.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex nl_table_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex 
&rdev->wiphy.mtx uevent_sock_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex nl_table_wait.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &k->k_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 &k->k_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx pin_fs_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx nl_table_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx reg_requests_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &base->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &k->list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex lock kernfs_idr_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 cb_lock 
genl_mutex rfkill_global_mutex bus_type_sem irq_context: 0 cb_lock genl_mutex rfkill_global_mutex sysfs_symlink_target_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &____s->seqcount irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &dev->power.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex dpm_list_mtx irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &rfkill->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex nl_table_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &k->k_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex subsys mutex#40 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex subsys mutex#40 &k->k_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex triggers_list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex leds_list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex.wait_lock irq_context: 0 cb_lock genl_mutex pin_fs_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 
fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 _xmit_ETHER pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx stack_depot_init_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx pcpu_alloc_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx pcpu_alloc_mutex pcpu_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &xa->xa_lock#4 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem &list->lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &tn->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &x->wait#9 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sem->wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 &k->k_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &dir->lock#2 irq_context: 0 cb_lock genl_mutex 
rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &dev->power.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dev_base_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx input_pool.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx batched_entropy_u32.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &tbl->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx sysctl_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx failover_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx proc_subdir_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx proc_inum_ida.xa_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx proc_subdir_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock _xmit_ETHER irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock _xmit_ETHER pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &pnettable->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx smc_ib_devices.mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &ndev->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &fq->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &local->iflist_mtx irq_context: 0 cb_lock genl_mutex (inetaddr_chain).rwsem irq_context: 0 cb_lock genl_mutex inet6addr_chain.lock irq_context: 0 cb_lock genl_mutex hwsim_radio_lock rcu_read_lock rhashtable_bucket irq_context: 0 cb_lock genl_mutex nl_table_lock irq_context: 0 cb_lock genl_mutex nl_table_wait.lock irq_context: 0 cb_lock rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 cb_lock rtnl_mutex &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 cb_lock rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock (console_sem).lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock console_lock console_srcu console_owner_lock irq_context: 0 
rtnl_mutex dev_addr_sem rcu_read_lock console_lock console_srcu console_owner irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock console_lock console_srcu console_owner console_owner_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &wdev->event_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &rdev->mgmt_registrations_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx (&dwork->timer) irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &base->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &rdev->wiphy_work_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx pin_fs_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pin_fs_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mount_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mount_lock mount_lock.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &xa->xa_lock#9 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &obj_hash[i].lock pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override 
pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &dentry->d_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &fsnotify_mark_srcu irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_lock_key#7 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &s->s_inode_list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &xa->xa_lock#9 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock &dentry->d_lock/1 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx rcu_read_lock mount_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx mount_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx mount_lock mount_lock.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx (&dwork->timer)#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx (work_completion)(&(&link->color_collision_detect_work)->work) irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fs_reclaim irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &fq->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx nl_table_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 cb_lock rtnl_mutex.wait_lock irq_context: 0 cb_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock key#16 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events 
(work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.changes_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.changes_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.changes_list_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.last_changeset_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.last_changeset_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tvlv.container_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock 
&pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &n->list_lock &c->lock irq_context: 0 cb_lock &rdev->wiphy.mtx rtnl_mutex.wait_lock irq_context: 0 cb_lock &rdev->wiphy.mtx &p->pi_lock irq_context: 0 cb_lock &rdev->wiphy.mtx &rdev->wiphy_work_lock irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &list->lock#15 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &ifibss->incomplete_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx (console_sem).lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex dev_addr_sem fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex dev_addr_sem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex _xmit_ETHER 
console_owner_lock irq_context: 0 rtnl_mutex _xmit_ETHER console_owner irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &data->mutex irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &base->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &wdev->event_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)cfg80211 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &wdev->event_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &rdev->bss_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx fs_reclaim irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx pool_lock#2 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &c->lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &n->list_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) 
&rdev->wiphy.mtx nl_table_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &list->lock#2 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &rq->__lock irq_context: 0 (wq_completion)events wireless_nlevent_work irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem pool_lock#2 irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem nl_table_lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem nl_table_wait.lock irq_context: 0 rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &list->lock#12 irq_context: softirq (&tun->flow_gc_timer) irq_context: softirq (&tun->flow_gc_timer) &tun->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex kfence_freelist_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock 
irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 cb_lock genl_mutex hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex hwsim_radio_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock hwsim_radio_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock 
rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &c->lock irq_context: 0 cb_lock rcu_read_lock &c->lock irq_context: 0 cb_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock &ndev->lock irq_context: 0 cb_lock &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 cb_lock &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &list->lock#15 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &ifibss->incomplete_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx (console_sem).lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_owner_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_owner irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) 
&rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &data->mutex irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &wdev->event_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &____s->seqcount irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.gp_wq irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock crngs.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 
(wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &fq->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->queue_stop_reason_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &list->lock#16 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &list->lock#16 irq_context: softirq &list->lock#16 irq_context: softirq rcu_read_lock lock#6 irq_context: softirq rcu_read_lock lock#6 kcov_remote_lock irq_context: softirq rcu_read_lock &ifibss->incomplete_lock irq_context: softirq rcu_read_lock &rdev->wiphy_work_lock irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: softirq rcu_read_lock &local->rx_path_lock irq_context: softirq rcu_read_lock &local->rx_path_lock &obj_hash[i].lock 
irq_context: softirq rcu_read_lock &local->rx_path_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sta->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx pin_fs_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx nl_table_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) 
&rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#970 irq_context: 0 rcu_state.exp_mutex pcpu_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_state.exp_mutex percpu_counters_lock irq_context: 0 rtnl_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wq->mutex stock_lock irq_context: 0 rtnl_mutex &wq->mutex &obj_hash[i].lock irq_context: 0 sb_writers#3 jbd2_handle stock_lock irq_context: 0 sb_writers#3 jbd2_handle key irq_context: 0 sb_writers#3 jbd2_handle pcpu_lock irq_context: 0 sb_writers#3 jbd2_handle percpu_counters_lock irq_context: 0 sb_writers#3 jbd2_handle pcpu_lock stock_lock irq_context: 0 rtnl_mutex &wq->mutex key irq_context: 0 rtnl_mutex &wq->mutex pcpu_lock irq_context: 0 rtnl_mutex &wq->mutex percpu_counters_lock irq_context: 0 rtnl_mutex &wq->mutex pcpu_lock stock_lock irq_context: 0 rtnl_mutex &wq->mutex pool_lock#2 irq_context: 0 rtnl_mutex &wq->mutex &cfs_rq->removed.lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1040 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 &n->list_lock &c->lock irq_context: 0 rtnl_mutex _xmit_ETHER batched_entropy_u8.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#912 irq_context: 0 rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &type->s_umount_key#46/1 irq_context: 0 &type->s_umount_key#46/1 fs_reclaim irq_context: 0 &type->s_umount_key#46/1 fs_reclaim &rq->__lock irq_context: 0 &type->s_umount_key#46/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#46/1 &c->lock irq_context: 0 
&type->s_umount_key#46/1 pool_lock#2 irq_context: 0 &type->s_umount_key#46/1 pcpu_alloc_mutex irq_context: 0 &type->s_umount_key#46/1 pcpu_alloc_mutex pcpu_lock irq_context: 0 &type->s_umount_key#46/1 shrinker_mutex irq_context: 0 &type->s_umount_key#46/1 list_lrus_mutex irq_context: 0 &type->s_umount_key#46/1 sb_lock irq_context: 0 &type->s_umount_key#46/1 sb_lock unnamed_dev_ida.xa_lock irq_context: 0 &type->s_umount_key#46/1 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_lock_key#32 irq_context: 0 &type->s_umount_key#46/1 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#46/1 tk_core.seq.seqcount irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_lock_key#32 &dentry->d_lock irq_context: 0 &type->s_umount_key#46/1 &____s->seqcount irq_context: 0 &type->s_umount_key#46/1 binderfs_minors_mutex irq_context: 0 &type->s_umount_key#46/1 binderfs_minors_mutex binderfs_minors.xa_lock irq_context: 0 &type->s_umount_key#46/1 &dentry->d_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &sb->s_type->i_lock_key#32 irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 rename_lock.seqcount irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 fs_reclaim irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 pool_lock#2 irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &dentry->d_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 rcu_read_lock rename_lock.seqcount irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &dentry->d_lock &wq irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &sb->s_type->i_lock_key#32 &dentry->d_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 mmu_notifier_invalidate_range_start irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 rcu_read_lock iunique_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 tk_core.seq.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 rename_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 fs_reclaim irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 &dentry->d_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 &root->kernfs_rwsem irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex fs_reclaim irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 
0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex pool_lock#2 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &n->list_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem tk_core.seq.seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &obj_hash[i].lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex css_set_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex cgroup_file_kn_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &____s->seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex batched_entropy_u32.lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex lock cgroup_idr_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex cgroup_idr_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex task_group_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &rq->__lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 rcu_read_lock &dentry->d_lock &lru->node[i].lock irq_context: 0 &type->i_mutex_dir_key#6 irq_context: 0 &type->i_mutex_dir_key#6 fs_reclaim irq_context: 0 &type->i_mutex_dir_key#6 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#6 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#6 rcu_read_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem inode_hash_lock irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem fs_reclaim irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#30 irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem &sb->s_type->i_lock_key#30 irq_context: 0 &type->i_mutex_dir_key#6 &sb->s_type->i_lock_key#30 irq_context: 0 &type->i_mutex_dir_key#6 &sb->s_type->i_lock_key#30 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#6 &sb->s_type->i_lock_key#30 &dentry->d_lock &wq irq_context: 0 kn->active#48 fs_reclaim irq_context: 0 kn->active#48 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#48 
&kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#49 fs_reclaim irq_context: 0 kn->active#49 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.rss.gp_wait.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &x->wait#3 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&hsr->prune_timer) irq_context: softirq (&hsr->prune_timer) &hsr->list_lock irq_context: softirq (&hsr->prune_timer) &obj_hash[i].lock irq_context: softirq (&hsr->prune_timer) &base->lock irq_context: softirq (&hsr->prune_timer) &base->lock &obj_hash[i].lock irq_context: softirq rcu_callback rlock-AF_NETLINK irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[0] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock rcu_read_lock_bh &base->lock irq_context: 0 rcu_read_lock rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex _xmit_ETHER kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#911 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &____s->seqcount#7 irq_context: 0 rcu_read_lock &nf_nat_locks[i] irq_context: 0 rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET remove_cache_srcu irq_context: 0 rtnl_mutex sk_lock-AF_INET remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem stock_lock 
irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &wb->work_lock &base->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &wb->work_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock kfence_freelist_lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_PHONET irq_context: 0 sk_lock-AF_PHONET slock-AF_PHONET irq_context: 0 slock-AF_PHONET irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &obj_hash[i].lock pool_lock irq_context: 0 rcu_read_lock &dir->lock#2 irq_context: 0 rcu_read_lock &ul->lock irq_context: 0 &mm->mmap_lock &base->lock irq_context: 0 &mm->mmap_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock pcpu_lock stock_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 pool_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 br_ioctl_mutex rtnl_mutex irq_context: 0 br_ioctl_mutex rtnl_mutex &rq->__lock irq_context: 0 br_ioctl_mutex rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 br_ioctl_mutex rtnl_mutex &mm->mmap_lock irq_context: 0 sb_internal &c->lock irq_context: 0 sb_internal remove_cache_srcu pool_lock#2 irq_context: 0 rcu_read_lock &____s->seqcount#9 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &____s->seqcount#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &____s->seqcount irq_context: 0 &data->open_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 &data->open_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work pool_lock#2 irq_context: softirq rcu_read_lock hwsim_radio_lock &c->lock irq_context: softirq rcu_read_lock hwsim_radio_lock &____s->seqcount irq_context: softirq rcu_read_lock hwsim_radio_lock pool_lock#2 irq_context: softirq 
rcu_read_lock hwsim_radio_lock &list->lock#16 irq_context: softirq rcu_read_lock &local->rx_path_lock &list->lock#15 irq_context: softirq rcu_read_lock &local->rx_path_lock &rdev->wiphy_work_lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 sk_lock-AF_INET krc.lock irq_context: 0 misc_mtx &wq->mutex &rq->__lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#72 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#911 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lock#6 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lock#6 kcov_remote_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock krc.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock pool_lock#2 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem inode_hash_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock krc.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fs_reclaim irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex 
cpu_hotplug_lock cgroup_threadgroup_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem pool_lock#2 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem inode_hash_lock &sb->s_type->i_lock_key#30 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem tk_core.seq.seqcount irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &sb->s_type->i_lock_key#30 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &s->s_inode_list_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &xa->xa_lock#9 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &fsnotify_mark_srcu irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &c->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &pcp->lock &zone->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &____s->seqcount irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock kernfs_notify_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock &base->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &p->pi_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex 
cpu_hotplug_lock cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[1] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events kernfs_notify_work &root->kernfs_supers_rwsem inode_hash_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#911 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_tx_wq#913 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#349 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#349 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#344 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#342 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#97 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#97 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#94 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#93 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#912 irq_context: 0 &data->open_mutex wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work fill_pool_map-wait-type-override pool_lock irq_context: 0 ebt_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.rss.gp_wait.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.rss.gp_wait.lock &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex (wq_completion)cpuset_migrate_mm irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex &wq->mutex irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex &wq->mutex &pool->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex &wq->mutex &x->wait#10 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 rename_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &dentry->d_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_rwsem irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 tomoyo_ss irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 tomoyo_ss 
rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem tk_core.seq.seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex css_set_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex lock cgroup_idr_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cgroup_idr_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock cpuset_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock cpuset_mutex jump_label_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock cpuset_mutex jump_label_mutex patch_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock cpuset_mutex callback_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &____s->seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex percpu_counters_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock jump_label_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock jump_label_mutex patch_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex shrinker_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex shrinker_mutex fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex shrinker_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex shrinker_mutex pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &____s->seqcount#2 irq_context: 0 rcu_read_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 rcu_read_lock &dentry->d_lock &lru->node[i].lock irq_context: 0 
&type->i_mutex_dir_key#7 irq_context: 0 &type->i_mutex_dir_key#7 fs_reclaim irq_context: 0 &type->i_mutex_dir_key#7 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#7 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#7 rcu_read_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem inode_hash_lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem fs_reclaim irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem inode_hash_lock &sb->s_type->i_lock_key#31 irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem inode_hash_lock &s->s_inode_list_lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &sb->s_type->i_lock_key#31 irq_context: 0 &type->i_mutex_dir_key#7 &sb->s_type->i_lock_key#31 irq_context: 0 &type->i_mutex_dir_key#7 &sb->s_type->i_lock_key#31 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key#7 &sb->s_type->i_lock_key#31 &dentry->d_lock &wq irq_context: 0 kn->active#50 fs_reclaim irq_context: 0 kn->active#50 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.rss.gp_wait.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fs_reclaim irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem pool_lock#2 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &c->lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock 
&____s->seqcount#2 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex &p->pi_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex &p->pi_lock &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex &p->alloc_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex &p->alloc_lock &____s->seqcount#2 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem cpuset_mutex cpuset_attach_wq.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock &obj_hash[i].lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock pool_lock#2 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock krc.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.rss.gp_wait.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex (wq_completion)cpuset_migrate_mm irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex &wq->mutex irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex &wq->mutex &pool->lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex &wq->mutex &x->wait#10 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &____s->seqcount#2 irq_context: 0 stock_lock irq_context: 0 &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &xa->xa_lock#5 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &xa->xa_lock#5 &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &xa->xa_lock#5 &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &xa->xa_lock#5 &____s->seqcount irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &xa->xa_lock#5 pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 stock_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &xa->xa_lock#5 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &xa->xa_lock#5 pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem stock_lock irq_context: 0 kn->active#51 fs_reclaim irq_context: 0 kn->active#51 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#51 stock_lock irq_context: 0 kn->active#51 pool_lock#2 irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &____s->seqcount 
irq_context: 0 kn->active#52 fs_reclaim irq_context: 0 kn->active#52 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 kn->active#52 stock_lock irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] fs_reclaim irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &c->lock irq_context: 0 sb_writers#9 &n->list_lock irq_context: 0 sb_writers#9 &n->list_lock &c->lock irq_context: 0 sb_writers#9 &of->mutex kn->active#52 memcg_max_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &xa->xa_lock#5 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &xa->xa_lock#5 pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &obj_hash[i].lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 stock_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex blkcg_pol_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex blkcg_pol_mutex fs_reclaim irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex blkcg_pol_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex blkcg_pol_mutex pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex blkcg_pol_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex blkcg_pol_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex devcgroup_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex cpu_hotplug_lock freezer_mutex irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex irq_context: 0 &f->f_pos_lock sb_writers#4 kfence_freelist_lock irq_context: 0 &f->f_pos_lock sb_writers#4 &meta->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#912 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#912 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#912 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#912 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#914 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#914 &rq->__lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#72 &devlink_port->type_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#914 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tty_mutex (work_completion)(&buf->work) &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 &obj_hash[i].lock irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 tty_mutex &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 cb_lock &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock batched_entropy_u8.lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rcu_state.gp_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock batched_entropy_u8.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#913 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#913 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#913 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#915 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#914 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#914 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#914 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#916 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#350 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#350 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#345 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#343 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#915 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#915 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#915 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#917 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#916 irq_context: 0 cb_lock genl_mutex &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#916 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#916 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#918 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#917 irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &rq->__lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex 
fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu &n->list_lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#917 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#917 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#917 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1269 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#917 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#919 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#351 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#351 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rcu_state.gp_wq &p->pi_lock irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#346 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#344 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#918 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#918 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#918 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#918 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#918 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#920 irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &dum_hcd->dum->lock irq_context: 0 &dev->mutex &hub->status_mutex &dum_hcd->dum->lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock 
&obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#919 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#919 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#919 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#919 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#919 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#921 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#921 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#921 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 remove_cache_srcu &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#352 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#352 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#352 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock fs_reclaim rcu_node_0 irq_context: 0 &mm->mmap_lock fs_reclaim &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock fs_reclaim &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#352 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#347 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#345 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#920 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#920 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#920 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#920 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#920 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#920 &rq->__lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 &obj_hash[i].lock irq_context: 0 tomoyo_ss &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &____s->seqcount#2 irq_context: 0 &p->lock &of->mutex kn->active#4 &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock 
rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh key#20 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &entry->crc_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#920 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#922 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#353 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#353 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#348 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1069 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#346 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 key irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 pcpu_lock irq_context: 0 sb_writers#4 tomoyo_ss &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &meta->lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 rcu_state.barrier_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock irq_context: 0 &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem stock_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) 
rtnl_mutex rcu_node_0 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock quarantine_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER &obj_hash[i].lock pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 percpu_counters_lock irq_context: 0 rtnl_mutex devnet_rename_sem key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &____s->seqcount#2 irq_context: softirq rcu_callback cgroup_threadgroup_rwsem.rss.gp_wait.lock irq_context: softirq rcu_callback cgroup_threadgroup_rwsem.rss.gp_wait.lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex rtnl_mutex.wait_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &p->pi_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#7 stock_lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &xa->xa_lock#5 irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &xa->xa_lock#5 pool_lock#2 irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem stock_lock irq_context: 0 kn->active#50 stock_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem freezer_mutex irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem freezer_mutex freezer_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &p->alloc_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &p->alloc_lock &newf->file_lock irq_context: 0 &xa->xa_lock#5 pool_lock#2 irq_context: 0 nf_hook_mutex irq_context: 0 nf_hook_mutex fs_reclaim irq_context: 0 nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nf_hook_mutex stock_lock irq_context: 0 nf_hook_mutex pool_lock#2 irq_context: 0 ebt_mutex &mm->mmap_lock 
irq_context: 0 nf_hook_mutex &c->lock irq_context: 0 nf_hook_mutex &____s->seqcount#2 irq_context: 0 nf_hook_mutex &____s->seqcount irq_context: 0 ebt_mutex &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 ebt_mutex &mm->mmap_lock fs_reclaim irq_context: 0 ebt_mutex &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 ebt_mutex &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 ebt_mutex &mm->mmap_lock &____s->seqcount irq_context: 0 ebt_mutex &mm->mmap_lock stock_lock irq_context: 0 ebt_mutex &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 ebt_mutex &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 &sb->s_type->i_mutex_key#10 stock_lock irq_context: 0 &xt[i].mutex &mm->mmap_lock irq_context: 0 &xt[i].mutex free_vmap_area_lock irq_context: 0 &xt[i].mutex free_vmap_area_lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex free_vmap_area_lock pool_lock#2 irq_context: 0 &xt[i].mutex vmap_area_lock irq_context: 0 &xt[i].mutex &per_cpu(xt_recseq, i) irq_context: 0 &xt[i].mutex &obj_hash[i].lock irq_context: 0 &xt[i].mutex purge_vmap_area_lock irq_context: 0 &vma->vm_lock->lock stock_lock irq_context: 0 &xt[i].mutex &c->lock irq_context: 0 &xt[i].mutex &____s->seqcount#2 irq_context: 0 pcpu_alloc_mutex fs_reclaim irq_context: 0 pcpu_alloc_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 pcpu_alloc_mutex &____s->seqcount irq_context: 0 nf_nat_proto_mutex irq_context: 0 nf_nat_proto_mutex fs_reclaim irq_context: 0 nf_nat_proto_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nf_nat_proto_mutex pool_lock#2 irq_context: 0 nf_nat_proto_mutex nf_hook_mutex irq_context: 0 nf_nat_proto_mutex nf_hook_mutex fs_reclaim irq_context: 0 nf_nat_proto_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 nf_nat_proto_mutex nf_hook_mutex stock_lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex pool_lock#2 irq_context: 0 nf_nat_proto_mutex cpu_hotplug_lock irq_context: 0 nf_nat_proto_mutex &obj_hash[i].lock irq_context: 0 nf_nat_proto_mutex stock_lock irq_context: 0 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 pcpu_alloc_mutex free_vmap_area_lock irq_context: 0 pcpu_alloc_mutex vmap_area_lock irq_context: 0 pcpu_alloc_mutex init_mm.page_table_lock irq_context: 0 pcpu_alloc_mutex &c->lock irq_context: 0 pcpu_alloc_mutex &____s->seqcount#2 irq_context: 0 pcpu_alloc_mutex &pcp->lock &zone->lock irq_context: 0 pcpu_alloc_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 nf_nat_proto_mutex &c->lock irq_context: 0 &pipe->mutex/1 stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &xa->xa_lock#5 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &xa->xa_lock#5 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle stock_lock irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#9 irq_context: 0 &vma->vm_lock->lock &sb->s_type->i_lock_key irq_context: 0 &vma->vm_lock->lock &info->lock irq_context: 0 &vma->vm_lock->lock lock#4 irq_context: 0 &vma->vm_lock->lock tk_core.seq.seqcount irq_context: 0 &vma->vm_lock->lock mount_lock irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#9 pool_lock#2 irq_context: 0 loop_validate_mutex irq_context: 0 loop_validate_mutex &lo->lo_mutex irq_context: 0 &fsnotify_mark_srcu fs_reclaim irq_context: 0 &fsnotify_mark_srcu fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &fsnotify_mark_srcu 
&____s->seqcount irq_context: 0 &fsnotify_mark_srcu pool_lock#2 irq_context: 0 &fsnotify_mark_srcu &group->notification_lock irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock &ep->wq irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock &ep->wq &p->pi_lock irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET rcu_read_lock pool_lock#2 irq_context: 0 &xt[i].mutex &n->list_lock irq_context: 0 &xt[i].mutex &n->list_lock &c->lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock stock_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &sem->wait_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 pcpu_lock stock_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &tbl->lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &tbl->lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &tbl->lock batched_entropy_u32.lock irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock &____s->seqcount#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#98 irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &xa->xa_lock#4 &n->list_lock &c->lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu pool_lock#2 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#98 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#95 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#94 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#921 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#921 irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx stack_depot_init_mutex &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 batched_entropy_u8.lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 kfence_freelist_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock &dir->lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock &ul->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex remove_cache_srcu pool_lock#2 irq_context: 0 kn->active#49 batched_entropy_u8.lock irq_context: 0 kn->active#49 kfence_freelist_lock irq_context: 0 &sb->s_type->i_mutex_key#3 &obj_hash[i].lock pool_lock irq_context: 0 br_ioctl_mutex rtnl_mutex &obj_hash[i].lock pool_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem stock_lock irq_context: 0 &group->notification_waitq &ep->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &____s->seqcount#2 irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex irq_context: 0 &mm->mmap_lock stock_lock irq_context: 0 hrtimer_bases.lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &____s->seqcount#2 irq_context: 0 &sb->s_type->i_mutex_key#9 &xa->xa_lock#5 irq_context: 0 &sb->s_type->i_mutex_key#9 &xa->xa_lock#5 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 stock_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 
stock_lock irq_context: 0 &r->consumer_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &____s->seqcount#2 irq_context: 0 &mm->mmap_lock stock_lock irq_context: 0 rtnl_mutex &wg->device_update_lock &n->list_lock irq_context: 0 rtnl_mutex &wg->device_update_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &vlan_netdev_addr_lock_key/1 &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: hardirq &vkms_out->lock &dev->event_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET6 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET6 pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (reg_check_chans).work irq_context: 0 (wq_completion)nfc2_nci_rx_wq#970 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#432 &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex devnet_rename_sem stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &msk->pm.lock irq_context: 0 &fsnotify_mark_srcu &____s->seqcount#2 irq_context: 0 &fsnotify_mark_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock batched_entropy_u8.lock irq_context: 0 &data->open_mutex rfkill_global_mutex &cfs_rq->removed.lock irq_context: 0 kn->active#14 &rq->__lock irq_context: 0 kn->active#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu &____s->seqcount irq_context: 0 &type->s_umount_key#47 quarantine_lock irq_context: softirq fs/notify/mark.c:89 rcu_read_lock &pool->lock irq_context: softirq fs/notify/mark.c:89 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq fs/notify/mark.c:89 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock kfence_freelist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (work_completion)(&msk->work) irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &meta->lock irq_context: 0 &data->open_mutex wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock (worker)->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &____s->seqcount#2 irq_context: 0 rtnl_mutex &ndev->lock &____s->seqcount#2 irq_context: 0 rtnl_mutex dev_addr_sem &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &data->mutex &rq->__lock irq_context: 0 sk_lock-AF_PHONET fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sk_lock-AF_PHONET fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &____s->seqcount#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss stock_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#950 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#921 irq_context: 0 rtnl_mutex _xmit_PHONET_PIPE irq_context: 0 (wq_completion)nfc2_nci_tx_wq#923 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#922 irq_context: 0 misc_mtx nfc_devlist_mutex subsys mutex#39 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem quarantine_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 kfence_freelist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: softirq rcu_callback pcpu_lock stock_lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex lweventlist_lock kfence_freelist_lock irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#922 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu kfence_freelist_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#922 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#924 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#923 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip6_mc_router_timer) irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 kn->active#52 &____s->seqcount#2 irq_context: 0 &____s->seqcount#10 irq_context: 0 &sb->s_type->i_mutex_key#10 &ping_table.lock irq_context: 0 &mm->mmap_lock fs_reclaim stock_lock irq_context: 0 &mm->mmap_lock fs_reclaim key irq_context: 0 &mm->mmap_lock fs_reclaim pcpu_lock irq_context: 0 &mm->mmap_lock fs_reclaim percpu_counters_lock irq_context: 0 &mm->mmap_lock fs_reclaim pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &mapping->i_private_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#923 irq_context: hardirq &vkms_out->lock &dev->event_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#923 irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &cfs_rq->removed.lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex pool_lock#2 irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 kfence_freelist_lock irq_context: 0 rtnl_mutex devnet_rename_sem pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock/1 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) &base->lock irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 
&journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &journal->j_state_lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&pool->idle_cull_work) pool_lock#2 irq_context: 0 rtnl_mutex &br->hash_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_tx_wq#925 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#924 irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &group->inotify_data.idr_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &group->inotify_data.idr_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &group->inotify_data.idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_node_0 irq_context: 0 &group->inotify_data.idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_read_lock &rq->__lock irq_context: 0 &rq->__lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx batched_entropy_u8.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx kfence_freelist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &meta->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &obj_hash[i].lock pool_lock 
irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override pool_lock irq_context: 0 uts_sem &rq->__lock irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#9 stock_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex lweventlist_lock &____s->seqcount#2 irq_context: softirq rcu_callback stock_lock irq_context: softirq &(&conn->info_timer)->timer irq_context: softirq &(&conn->info_timer)->timer rcu_read_lock &pool->lock irq_context: softirq &(&conn->info_timer)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq &(&conn->info_timer)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&(&conn->info_timer)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&conn->info_timer)->work) &conn->chan_lock irq_context: 0 rtnl_mutex devnet_rename_sem pcpu_lock irq_context: 0 rtnl_mutex devnet_rename_sem percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 rtnl_mutex pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 cb_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount#2 irq_context: 0 cb_lock &____s->seqcount#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 stock_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 proc_subdir_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &____s->seqcount#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &pcp->lock &zone->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &____s->seqcount irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 nf_sockopt_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) sched_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) sched_map-wait-type-override &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss key irq_context: 0 nf_sockopt_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss pcpu_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock 
irq_context: 0 &u->iolock &rcu_state.expedited_wq irq_context: 0 &u->iolock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &u->iolock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &idev->mc_lock _xmit_ETHER &c->lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock irq_context: 0 misc_mtx nfc_index_ida.xa_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock crngs.lock irq_context: 0 misc_mtx pcpu_alloc_mutex irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock &nsim_dev->fa_cookie_lock irq_context: 0 misc_mtx pcpu_alloc_mutex pcpu_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock &n->list_lock irq_context: 0 misc_mtx cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex irq_context: 0 (wq_completion)bond0#72 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex fs_reclaim irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)bond0#72 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex pool_lock#2 irq_context: 0 (wq_completion)bond0#72 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)bond0#72 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex.wait_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex irq_context: 0 (wq_completion)bond0#72 (work_completion)(&(&slave->notify_work)->work) &p->pi_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) 
&devlink->lock_key#72 &nsim_trap_data->trap_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 lweventlist_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)bond0#67 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 (wq_completion)bond0#67 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#67 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 lweventlist_lock &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_lock stock_lock irq_context: 0 wq_pool_attach_mutex.wait_lock irq_context: 0 wq_pool_attach_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock lweventlist_lock &n->list_lock irq_context: 0 wq_pool_attach_mutex &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 lweventlist_lock &dir->lock#2 irq_context: 0 cb_lock rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &zone->lock irq_context: softirq 
(&hsr->announce_timer) rcu_read_lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) sched_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#924 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#924 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem pcpu_lock stock_lock irq_context: 0 &dev->mutex#3 &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work pool_lock#2 irq_context: 0 misc_mtx kthread_create_lock irq_context: 0 rtnl_mutex &batadv_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &x->wait irq_context: 0 misc_mtx wq_pool_mutex irq_context: 0 misc_mtx wq_pool_mutex &wq->mutex irq_context: 0 misc_mtx misc_mtx.wait_lock irq_context: 0 misc_mtx hrtimer_bases.lock irq_context: 0 misc_mtx hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 misc_mtx hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex dev_addr_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex _xmit_ETHER &local->filter_lock &c->lock irq_context: 0 rtnl_mutex _xmit_ETHER &local->filter_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex _xmit_ETHER &local->filter_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 misc_mtx nfc_devlist_mutex irq_context: 0 misc_mtx nfc_devlist_mutex fs_reclaim irq_context: 0 misc_mtx nfc_devlist_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &____s->seqcount#2 irq_context: 0 misc_mtx nfc_devlist_mutex pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex &k->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex gdp_mutex irq_context: 0 misc_mtx nfc_devlist_mutex gdp_mutex &k->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex lock irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 misc_mtx nfc_devlist_mutex bus_type_sem irq_context: 0 misc_mtx nfc_devlist_mutex sysfs_symlink_target_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem irq_context: 0 misc_mtx nfc_devlist_mutex &dev->power.lock irq_context: 0 misc_mtx nfc_devlist_mutex dpm_list_mtx irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex fs_reclaim irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex nl_table_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 misc_mtx 
nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex subsys mutex#39 irq_context: 0 misc_mtx nfc_devlist_mutex subsys mutex#39 &k->k_lock irq_context: 0 misc_mtx &k->k_lock irq_context: 0 misc_mtx llcp_devices_lock irq_context: 0 misc_mtx &dev->mutex fs_reclaim irq_context: 0 misc_mtx &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx &dev->mutex pool_lock#2 irq_context: 0 misc_mtx &dev->mutex &x->wait#9 irq_context: 0 misc_mtx &dev->mutex &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &k->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex bus_type_sem irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex sysfs_symlink_target_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &____s->seqcount#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &dev->power.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex dpm_list_mtx irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rfkill->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex nl_table_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock 
&ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &k->k_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex subsys mutex#40 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex subsys mutex#40 &k->k_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex triggers_list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex leds_list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex.wait_lock irq_context: 0 misc_mtx &dev->mutex &p->pi_lock irq_context: 0 misc_mtx &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nl_table_lock irq_context: 0 misc_mtx nl_table_wait.lock irq_context: 0 misc_mtx.wait_lock irq_context: 0 cb_lock &rdev->wiphy.mtx &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock &rdev->wiphy.mtx &p->pi_lock &rq->__lock irq_context: 0 cb_lock &rdev->wiphy.mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rcu_read_lock rcu_node_0 irq_context: 0 cb_lock rcu_read_lock &rq->__lock irq_context: softirq rcu_read_lock hwsim_radio_lock &____s->seqcount#2 irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &tb->tb6_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &obj_hash[i].lock 
irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip6_other_query.timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip6_other_query.delay_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&brmctx->ip6_own_query.timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bridge_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bridge_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bridge_netdev_addr_lock_key/1 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bridge_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &im->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 &lslist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->ipv4.ra_mutex irq_context: 0 &sb->s_type->i_mutex_key#10 &hashinfo->lock irq_context: 0 &ndev->req_lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &dccp_hashinfo.bhash[i].lock irq_context: 0 &ndev->req_lock &wq->mutex irq_context: 0 &ndev->req_lock &wq->mutex &pool->lock irq_context: 0 &ndev->req_lock &wq->mutex &x->wait#10 irq_context: 0 &ndev->req_lock (&ndev->cmd_timer) irq_context: 0 &ndev->req_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock &base->lock irq_context: 0 &ndev->req_lock (&ndev->data_timer) irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock irq_context: 0 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &lock->wait_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &lock->wait_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &p->pi_lock 
&rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &c->lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rq->__lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 &p->alloc_lock &x->wait &p->pi_lock &rq->__lock irq_context: 0 &p->alloc_lock &x->wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex (work_completion)(&rfkill->uevent_work) irq_context: 0 &dev->mutex rcu_read_lock rcu_node_0 irq_context: 0 &dev->mutex (work_completion)(&rfkill->sync_work) irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 &dev->mutex kernfs_idr_lock irq_context: 0 &dev->mutex &k->k_lock klist_remove_lock irq_context: 0 &dev->mutex subsys mutex#40 irq_context: 0 &dev->mutex subsys mutex#40 &k->k_lock irq_context: 0 &dev->mutex subsys mutex#40 &k->k_lock klist_remove_lock irq_context: 0 &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &rfkill->lock irq_context: 0 &dev->mutex uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex uevent_sock_mutex &c->lock irq_context: 0 &dev->mutex uevent_sock_mutex pool_lock#2 irq_context: 0 &dev->mutex uevent_sock_mutex nl_table_lock irq_context: 0 &dev->mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 &dev->mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 &dev->mutex rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 inode_hash_lock &sb->s_type->i_lock_key#24 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &____s->seqcount#2 irq_context: 0 cb_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 cb_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &batadv_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 rtnl_mutex sk_lock-AF_INET remove_cache_srcu &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem quarantine_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem key irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rq->__lock cpu_asid_lock irq_context: 0 &dev->mutex remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex sk_lock-AF_INET irq_context: 0 sb_writers#5 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock ptlock_ptr(ptdesc)#2 &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &sem->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock key#6 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal quarantine_lock irq_context: 0 &dev->mutex triggers_list_lock irq_context: 0 &dev->mutex leds_list_lock irq_context: 0 &local->sockets.lock irq_context: 0 &local->raw_sockets.lock irq_context: 0 (&local->link_timer) irq_context: 0 (work_completion)(&local->tx_work) irq_context: 0 (work_completion)(&local->rx_work) irq_context: 0 (work_completion)(&local->rx_work) &rq->__lock irq_context: 0 (work_completion)(&local->rx_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (work_completion)(&local->timeout_work) irq_context: 0 (&local->sdreq_timer) irq_context: 0 (work_completion)(&local->sdreq_timeout_work) irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 nfc_devlist_mutex 
&root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 nfc_devlist_mutex kernfs_idr_lock irq_context: 0 nfc_devlist_mutex subsys mutex#39 &k->k_lock klist_remove_lock irq_context: 0 nfc_devlist_mutex &x->wait#9 irq_context: 0 nfc_devlist_mutex deferred_probe_mutex irq_context: 0 nfc_devlist_mutex device_links_lock irq_context: 0 nfc_devlist_mutex mmu_notifier_invalidate_range_start irq_context: 0 nfc_devlist_mutex uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 nfc_devlist_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex nl_table_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &c->lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 &sighand->siglock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock nl_table_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rlock-AF_NETLINK irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock nl_table_wait.lock irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock lock#8 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock id_table_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock krc.lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &cfs_rq->removed.lock irq_context: 0 cb_lock (console_sem).lock irq_context: 0 cb_lock console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &table->lock#3 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem stock_lock irq_context: 0 pcpu_lock stock_lock irq_context: 0 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &sighand->siglock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_lock_key &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq net/wireless/reg.c:533 irq_context: softirq net/wireless/reg.c:533 rcu_read_lock &pool->lock irq_context: softirq net/wireless/reg.c:533 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq net/wireless/reg.c:533 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex reg_indoor_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex krc.lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex reg_requests_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex reg_pending_beacons_lock irq_context: 0 
(wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex fs_reclaim irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &mm->mmap_lock stock_lock irq_context: 0 misc_mtx remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)events_power_efficient (gc_work).work pool_lock#2 irq_context: 0 &xt[i].mutex &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 &hdev->req_lock &n->list_lock irq_context: 0 &hdev->req_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &n->list_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &n->list_lock &c->lock irq_context: 0 &data->open_mutex rfkill_global_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 cb_lock console_lock console_srcu console_owner irq_context: 0 rtnl_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound connector_reaper_work sched_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 tomoyo_ss batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu pool_lock#2 irq_context: 0 rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rq->__lock cpu_asid_lock irq_context: 0 kn->active#52 &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#926 irq_context: 0 cb_lock console_lock console_srcu 
console_owner console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock quarantine_lock irq_context: softirq (&peer->timer_persistent_keepalive) &pcp->lock &zone->lock &____s->seqcount irq_context: 0 tasklist_lock stock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rq->__lock cpu_asid_lock irq_context: 0 &dev->mode_config.mutex &mm->mmap_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 &dev->mutex stock_lock irq_context: 0 &pipe->mutex/1 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 batched_entropy_u8.lock crngs.lock irq_context: 0 &dev->mutex key irq_context: 0 &dev->mutex pcpu_lock irq_context: 0 &dev->mutex percpu_counters_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_writers#3 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fsnotify_mark_srcu key irq_context: 0 &fsnotify_mark_srcu pcpu_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &xt[i].mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex pcpu_lock stock_lock irq_context: 0 &dev->mode_config.mutex &mm->mmap_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rq->__lock cpu_asid_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock fs_reclaim &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock 
fs_reclaim pool_lock#2 irq_context: 0 &disk->open_mutex inode_hash_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 key#25 irq_context: 0 key#25 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: softirq rcu_read_lock hwsim_radio_lock batched_entropy_u8.lock irq_context: 0 rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_read_lock hwsim_radio_lock kfence_freelist_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &dev->mode_config.mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 tomoyo_ss &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1251 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#6 irq_context: 0 pernet_ops_rwsem rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock 
irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex &lock->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 &c->lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#371 irq_context: 0 &xt[i].mutex purge_vmap_area_lock &base->lock irq_context: 0 &xt[i].mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex purge_vmap_area_lock &base->lock &obj_hash[i].lock irq_context: 0 &pipe->mutex/1 &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rnp->exp_wq[1] irq_context: 0 &xt[i].mutex fs_reclaim &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex 
&caifn->caifdevs.lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &simple_offset_xa_lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_lock_key#5 irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_lock_key#5 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 &fsnotify_mark_srcu irq_context: 0 &type->i_mutex_dir_key/1 &s->s_inode_list_lock irq_context: 0 &type->i_mutex_dir_key/1 &sbinfo->stat_lock irq_context: 0 &type->i_mutex_dir_key/1 &xa->xa_lock#9 irq_context: 0 &type->i_mutex_dir_key/1 &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->i_mutex_dir_key/1 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnettable->lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnettable->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1152 &rq->__lock irq_context: 0 &xt[i].mutex fs_reclaim &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->routes.lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->routes.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1152 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1056 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1196 &rq->__lock irq_context: 0 nfc_devlist_mutex gdp_mutex gdp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER/1 &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1196 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback put_task_map-wait-type-override stock_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount#2 irq_context: 0 sk_lock-AF_INET &____s->seqcount#2 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1196 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#981 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_INET rcu_node_0 irq_context: 0 sk_lock-AF_INET &rcu_state.expedited_wq irq_context: 0 sk_lock-AF_INET &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &obj_hash[i].lock pool_lock irq_context: 0 sk_lock-AF_INET &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh &r->producer_lock#3 irq_context: 0 sk_lock-AF_INET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock &obj_hash[i].lock pool_lock irq_context: 0 &hdev->req_lock &hdev->lock &n->list_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &____s->seqcount#2 irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 &group->mark_mutex remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#981 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#383 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#980 irq_context: 0 pernet_ops_rwsem rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events 
(work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rnp->exp_wq[1] irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock batched_entropy_u8.lock crngs.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_QIPCRTR slock-AF_QIPCRTR irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_QIPCRTR clock-AF_QIPCRTR irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &____s->seqcount#2 irq_context: softirq rcu_read_lock rcu_read_lock quarantine_lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET remove_cache_srcu &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_QIPCRTR irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex pool_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start key irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh 
dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_lock_key &xa->xa_lock#9 pool_lock#2 irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 &obj_hash[i].lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 &base->lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 &base->lock &obj_hash[i].lock irq_context: 0 nf_sockopt_mutex key irq_context: 0 nf_sockopt_mutex pcpu_lock irq_context: 0 nf_sockopt_mutex percpu_counters_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET remove_cache_srcu pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1167 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &dev->mutex rfkill_global_mutex &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc37_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1075 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#371 irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (shepherd).work &rq->__lock irq_context: 0 (wq_completion)events (shepherd).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#579 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 
sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1086 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1137 irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu irq_context: 0 tasklist_lock rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex k-sk_lock-AF_INET6 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &c->lock irq_context: 0 rtnl_mutex k-sk_lock-AF_INET6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &rq->__lock cpu_asid_lock irq_context: softirq (&p->timer) irq_context: softirq (&p->timer) &br->multicast_lock irq_context: softirq (&p->timer) &br->multicast_lock pool_lock#2 irq_context: softirq (&p->timer) &br->multicast_lock &dir->lock#2 irq_context: softirq (&p->timer) &br->multicast_lock deferred_lock irq_context: softirq (&p->timer) &br->multicast_lock rcu_read_lock &pool->lock irq_context: softirq (&p->timer) &br->multicast_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&p->timer) &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&p->timer) &br->multicast_lock nl_table_lock irq_context: softirq (&p->timer) &br->multicast_lock &obj_hash[i].lock irq_context: softirq (&p->timer) &br->multicast_lock nl_table_wait.lock irq_context: softirq (&p->timer) &br->multicast_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&p->timer) &br->multicast_lock &base->lock irq_context: softirq (&p->timer) &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: softirq (&p->timer) &br->multicast_lock &c->lock irq_context: softirq (&p->timer) &br->multicast_lock &n->list_lock irq_context: softirq (&p->timer) &br->multicast_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &ndev->lock irq_context: softirq rcu_read_lock rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#951 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&mp->timer) &br->multicast_lock pool_lock#2 irq_context: softirq (&mp->timer) &br->multicast_lock &dir->lock#2 irq_context: softirq (&mp->timer) &br->multicast_lock deferred_lock irq_context: softirq (&mp->timer) &br->multicast_lock nl_table_lock irq_context: softirq (&mp->timer) &br->multicast_lock &obj_hash[i].lock irq_context: softirq (&mp->timer) &br->multicast_lock nl_table_wait.lock irq_context: softirq (&mp->timer) &br->multicast_lock &c->lock irq_context: softirq (&timer.timer) &p->pi_lock &rq->__lock &obj_hash[i].lock irq_context: softirq (&timer.timer) &p->pi_lock &rq->__lock &base->lock irq_context: softirq (&timer.timer) &p->pi_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#474 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1126 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1128 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#470 irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sk_lock-AF_INET &net->xfrm.xfrm_policy_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 &base->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 krc.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 &obj_hash[i].lock pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 rcu_read_lock rhashtable_bucket irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 rcu_read_lock &pool->lock irq_context: 0 &u->iolock &mm->mmap_lock &obj_hash[i].lock irq_context: 0 crtc_ww_class_mutex &lock->wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 &u->iolock &mm->mmap_lock pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#951 
irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#952 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#106 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#9 &rq->__lock irq_context: 0 &fsnotify_mark_srcu &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem batched_entropy_u8.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem kfence_freelist_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1152 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rnp->exp_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rnp->exp_wq[3] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu irq_context: 0 sb_writers#3 &s->s_inode_list_lock irq_context: 0 sb_writers#3 sb_internal irq_context: 0 sb_writers#3 sb_internal mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 sb_internal pool_lock#2 irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &xa->xa_lock#9 irq_context: 0 sb_writers#3 sb_internal jbd2_handle 
&ei->i_es_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 sb_internal &obj_hash[i].lock irq_context: 0 sb_writers#3 inode_hash_lock irq_context: 0 sb_writers#3 inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &fsnotify_mark_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rename_lock.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &wb->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle 
&ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &sbi->s_md_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &xa->xa_lock#9 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_es_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_wait_updates irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &obj_hash[i].lock irq_context: 0 &fsnotify_mark_srcu &c->lock irq_context: 0 &xt[i].mutex purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex purge_vmap_area_lock pool_lock#2 irq_context: 0 &xt[i].mutex init_mm.page_table_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#480 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#5 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 rfkill_global_mutex rcu_read_lock &rq->__lock irq_context: 0 put_task_map-wait-type-override#2 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rcu_state.expedited_wq 
irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 misc_mtx rfkill_global_mutex &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_es_lock key#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#7 tomoyo_ss &____s->seqcount#2 irq_context: 0 sb_writers#7 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 sb_writers#7 tomoyo_ss &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 tomoyo_ss &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &c->lock irq_context: 0 (wq_completion)nfc11_nci_rx_wq#5 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock batched_entropy_u8.lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rcu_state.gp_wq irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_tx_wq#5 irq_context: 0 rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock irq_context: 0 rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock &obj_hash[i].lock pool_lock irq_context: 0 sched_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 sched_map-wait-type-override &obj_hash[i].lock irq_context: 0 sched_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events 
(work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &c->lock irq_context: 0 sk_lock-AF_INET fs_reclaim &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &xt[i].mutex purge_vmap_area_lock &meta->lock irq_context: 0 misc_mtx (wq_completion)nfc17_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &____s->seqcount irq_context: 0 sched_map-wait-type-override &rq->__lock cpu_asid_lock irq_context: 0 kn->active#4 &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->pndevs.lock &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &____s->seqcount#2 irq_context: 0 &xt[i].mutex rcu_read_lock &pool->lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &____s->seqcount irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#6 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem stock_lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim 
mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem percpu_counters_lock irq_context: 0 rcu_read_lock pool_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu stock_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu key irq_context: 0 &mm->mmap_lock remove_cache_srcu pcpu_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#464 irq_context: 0 &u->iolock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rename_lock irq_context: 0 (wq_completion)nfc14_nci_tx_wq#6 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->pndevs.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#981 irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) irq_context: 0 &mdev->req_queue_mutex &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#4 &____s->seqcount#2 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#47 &obj_hash[i].lock pool_lock irq_context: 0 &type->s_umount_key#47 &rq->__lock irq_context: 0 &type->s_umount_key#47 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#480 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu rcu_node_0 irq_context: 0 
&dev_instance->mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock &n->list_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock batched_entropy_u8.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock kfence_freelist_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &xattrs->lock irq_context: 0 wq_pool_attach_mutex &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock quarantine_lock irq_context: 0 sk_lock-AF_PACKET &cfs_rq->removed.lock irq_context: 0 rcu_state.barrier_mutex rcu_state.barrier_mutex.wait_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: softirq rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &fq->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh 
_xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->queue_stop_reason_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &list->lock#16 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &list->lock#16 irq_context: 0 &dev->mutex kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 &dev->dev_mutex fs_reclaim &rq->__lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex &meta->lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock pcpu_lock stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#5 irq_context: 0 cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#5 irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) 
rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock lweventlist_lock &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#480 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#480 irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem quarantine_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &n->list_lock irq_context: 0 cb_lock &rdev->wiphy.mtx &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &lock->wait_lock irq_context: 0 cb_lock &lock->wait_lock irq_context: softirq fs/notify/mark.c:89 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq fs/notify/mark.c:89 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 &p->lock &of->mutex kn->active#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount#2 
irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &lock->wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &lock->wait_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &c->lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &____s->seqcount#2 irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &____s->seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss &____s->seqcount#2 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &____s->seqcount#2 irq_context: 0 kn->active#48 &c->lock irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 kn->active#49 &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sta->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx pin_fs_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 
(wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx nl_table_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 cb_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &____s->seqcount irq_context: 0 kn->active#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &____s->seqcount#2 irq_context: 0 kn->active#4 &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &lock->wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem nl_table_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem nl_table_wait.lock irq_context: 0 
(wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &____s->seqcount irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 rtnl_mutex _xmit_ETHER &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &pcp->lock &zone->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex lock kernfs_idr_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &n->list_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &n->list_lock &c->lock irq_context: 0 misc_mtx &____s->seqcount#2 irq_context: 0 misc_mtx &n->list_lock 
irq_context: 0 misc_mtx &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &n->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock &____s->seqcount#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &c->lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#50 &c->lock irq_context: 0 ebt_mutex &c->lock irq_context: 0 nf_nat_proto_mutex &____s->seqcount#2 irq_context: 0 nf_nat_proto_mutex &____s->seqcount irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override pool_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &obj_hash[i].lock irq_context: 0 tomoyo_ss mount_lock irq_context: 0 tomoyo_ss mount_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss mount_lock irq_context: 0 rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &pcp->lock &zone->lock &____s->seqcount irq_context: softirq rcu_read_lock hwsim_radio_lock &pcp->lock &zone->lock irq_context: softirq rcu_read_lock hwsim_radio_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#2 rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key#2 &xa->xa_lock#5 irq_context: 0 &type->i_mutex_dir_key#2 &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#2 stock_lock irq_context: 0 &type->i_mutex_dir_key#2 &dentry->d_lock &wq#2 irq_context: 0 sb_writers#7 kn->active#4 &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 misc_mtx stock_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex &obj_hash[i].lock pool_lock irq_context: 0 &xa->xa_lock#20 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 &sb->s_type->i_mutex_key#10 
sk_lock-AF_INET/1 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 slock-AF_INET#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 rlock-AF_INET irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 &list->lock#17 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 &sctp_ep_hashtable[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 clock-AF_INET irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &n->list_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 cb_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 sb_writers#3 &rq->__lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx lock kernfs_idr_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss mount_lock rcu_read_lock rename_lock.seqcount irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &rq->__lock irq_context: 0 &xt[i].mutex &rq->__lock irq_context: 0 &xt[i].mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock pool_lock#2 irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override 
&____s->seqcount irq_context: 0 rtnl_mutex &dev->tx_global_lock irq_context: 0 rtnl_mutex &dev->tx_global_lock _xmit_ETHER#2 irq_context: 0 rtnl_mutex &dev->tx_global_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &lock->wait_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &sch->q.lock irq_context: softirq rcu_read_lock &table->lock#3 irq_context: softirq rcu_read_lock rcu_read_lock &table->lock#3 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &table->lock#3 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &dir->lock#2 irq_context: 0 &sig->cred_guard_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &new_node->seq_out_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock 
rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock quarantine_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->s_umount_key#46/1 &____s->seqcount#2 irq_context: 0 &type->s_umount_key#46/1 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#46/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#6 &root->kernfs_rwsem &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem &____s->seqcount#2 irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &root->kernfs_rwsem pool_lock#2 irq_context: 0 kn->active#49 &n->list_lock irq_context: 0 kn->active#49 &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex 
fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &ndev->lock irq_context: 0 cb_lock rcu_read_lock &n->list_lock irq_context: 0 cb_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex console_owner_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex console_owner irq_context: 0 cb_lock genl_mutex rtnl_mutex console_lock console_srcu console_owner_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex console_lock console_srcu console_owner irq_context: 0 cb_lock genl_mutex rtnl_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &____s->seqcount irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock fs_reclaim &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &c->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem pool_lock#2 irq_context: 0 &mm->mmap_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#5 &____s->seqcount#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &____s->seqcount irq_context: 0 cb_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &____s->seqcount#2 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &group->notification_waitq &ep->lock &ep->wq irq_context: 0 &group->notification_waitq &ep->lock &ep->wq &p->pi_lock irq_context: 0 &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &pcp->lock &zone->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &____s->seqcount#2 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &pcp->lock &zone->lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &c->lock irq_context: 0 ebt_mutex &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 ebt_mutex &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 ebt_mutex &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_nat_proto_mutex &obj_hash[i].lock pool_lock irq_context: 0 nf_nat_proto_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 nf_nat_proto_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 nf_nat_proto_mutex fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 nf_nat_proto_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 nf_nat_proto_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock 
&pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &c->lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &____s->seqcount#2 irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock lock#4 &lruvec->lru_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex &rq->__lock irq_context: 0 misc_mtx &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &____s->seqcount#2 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rcu_state.expedited_wq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock &____s->seqcount#2 irq_context: 0 &type->s_umount_key#46/1 &n->list_lock irq_context: 0 &type->s_umount_key#46/1 &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock kfence_freelist_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &____s->seqcount#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &x->wait#2 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex cgroup_mutex.wait_lock irq_context: 0 kn->active#51 remove_cache_srcu irq_context: 0 cb_lock rtnl_mutex &cfs_rq->removed.lock irq_context: 0 cb_lock 
rtnl_mutex &obj_hash[i].lock irq_context: softirq rcu_read_lock &meta->lock irq_context: 0 cb_lock rtnl_mutex pool_lock#2 irq_context: 0 &dev->mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 &dev->mutex kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 tomoyo_ss &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock once_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock once_lock crngs.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx 
&rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex &____s->seqcount irq_context: 0 misc_mtx &dev->mutex &c->lock irq_context: 0 misc_mtx &dev->mutex &____s->seqcount#2 irq_context: 0 misc_mtx &dev->mutex &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 iattr_mutex &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &local->filter_lock irq_context: 0 &type->i_mutex_dir_key#4 iattr_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &sighand->siglock stock_lock irq_context: 0 rcu_read_lock &sighand->siglock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock &sighand->siglock stock_lock irq_context: 0 tasklist_lock &sighand->siglock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) 
&rdev->wiphy.mtx rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.waiters.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &fq->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &local->active_txq_lock[i] irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx (work_completion)(&sta->drv_deliver_wk) irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rq->__lock cpu_asid_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &pcp->lock &zone->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock &p->pi_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq fs/notify/mark.c:89 rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock stock_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock &obj_hash[i].lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock key irq_context: 0 
sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock pcpu_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock percpu_counters_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock pcpu_lock stock_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock pool_lock#2 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex.wait_lock irq_context: 0 sb_writers#8 &of->mutex &p->pi_lock irq_context: 0 sb_writers#8 &of->mutex &p->pi_lock &rq->__lock irq_context: 0 sb_writers#8 &of->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 cgroup_mutex.wait_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &p->pi_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex.wait_lock irq_context: 0 sb_writers#9 &of->mutex &p->pi_lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &c->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &____s->seqcount#2 irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &____s->seqcount irq_context: 0 kn->active#51 &c->lock irq_context: 0 kn->active#51 &n->list_lock irq_context: 0 kn->active#51 &n->list_lock &c->lock irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6/1 cgroup_mutex &pcp->lock &zone->lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 tomoyo_ss &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &type->i_mutex_dir_key#6 &c->lock irq_context: 0 &type->i_mutex_dir_key#7 &c->lock irq_context: 0 &type->i_mutex_dir_key#7 &____s->seqcount#2 irq_context: 0 &type->i_mutex_dir_key#7 &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#7 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#7 &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &____s->seqcount#2 irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#7 &root->kernfs_rwsem &____s->seqcount irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cgroup_mutex.wait_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#9 &type->i_mutex_dir_key#7/1 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem stock_lock irq_context: 0 &type->i_mutex_dir_key#7 pool_lock#2 irq_context: 0 sb_writers#3 sb_internal &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ebt_mutex &n->list_lock irq_context: 0 ebt_mutex &n->list_lock &c->lock irq_context: 0 ebt_mutex &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 kn->active#51 
remove_cache_srcu quarantine_lock irq_context: 0 kn->active#51 remove_cache_srcu &c->lock irq_context: 0 kn->active#51 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#51 remove_cache_srcu &obj_hash[i].lock irq_context: 0 kn->active#52 &c->lock irq_context: 0 nf_hook_mutex &rq->__lock irq_context: 0 nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_hook_mutex nf_hook_mutex.wait_lock irq_context: 0 nf_hook_mutex.wait_lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex.wait_lock irq_context: 0 nf_nat_proto_mutex &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &base->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &table->lock#3 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)events_unbound (reaper_work).work rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq rcu_callback per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: softirq rcu_callback stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: softirq rcu_read_lock kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock kfence_freelist_lock irq_context: softirq rcu_read_lock hwsim_radio_lock &n->list_lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.gp_wq irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &cfs_rq->removed.lock irq_context: 0 &p->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &c->lock irq_context: 0 &vma->vm_lock->lock &xa->xa_lock#9 &c->lock irq_context: 0 tomoyo_ss remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &sch->q.lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &sch->q.lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &c->lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: softirq rcu_read_lock dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 
&____s->seqcount#2 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &group->mark_mutex &____s->seqcount#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &obj_hash[i].lock irq_context: softirq (&peer->timer_persistent_keepalive) irq_context: softirq (&peer->timer_persistent_keepalive) pool_lock#2 irq_context: softirq (&peer->timer_persistent_keepalive) &list->lock#14 irq_context: softirq (&peer->timer_persistent_keepalive) tk_core.seq.seqcount irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh &r->producer_lock#2 irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &table->lock#3 irq_context: 0 cgroup_threadgroup_rwsem &p->pi_lock &rq->__lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev_instance->mutex &____s->seqcount#2 irq_context: 0 misc_mtx nfc_devlist_mutex &____s->seqcount#2 irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex kfence_freelist_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &meta->lock irq_context: 0 
cb_lock genl_mutex rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rq->__lock irq_context: 0 sk_lock-AF_INET &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &____s->seqcount irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock &rq->__lock irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock &p->alloc_lock irq_context: softirq rcu_read_lock rcu_read_lock &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &c->lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &____s->seqcount irq_context: 0 drm_unplug_srcu irq_context: 0 &file->master_lookup_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.last_changeset_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events 
(work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tvlv.container_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tvlv.container_list_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_state_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_state_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock 
rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &ul->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 &dev->event_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle bit_wait_table + i irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock 
rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 batched_entropy_u8.lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override pool_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex quarantine_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &mapping->i_private_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &mapping->i_private_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &ret->b_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &mapping->i_private_lock irq_context: 0 
sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 &fsnotify_mark_srcu &rq->__lock irq_context: 0 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &pcp->lock &zone->lock irq_context: softirq rcu_read_lock hwsim_radio_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &meta->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 jbd2_handle &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 irq_context: 0 &dev->mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 &dev->mutex &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 kn->active#4 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock fs_reclaim 
mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) irq_context: softirq (&ndev->rs_timer) &ndev->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock &ndev->lock irq_context: softirq (&ndev->rs_timer) pool_lock#2 irq_context: softirq (&ndev->rs_timer) &c->lock irq_context: softirq (&ndev->rs_timer) &dir->lock#2 irq_context: softirq (&ndev->rs_timer) &ul->lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#11 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &ndev->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: softirq (&ndev->rs_timer) &ndev->lock batched_entropy_u32.lock irq_context: softirq (&ndev->rs_timer) &ndev->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) &ndev->lock &base->lock irq_context: softirq (&ndev->rs_timer) &ndev->lock &base->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &pcp->lock &zone->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &meta->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &n->list_lock irq_context: softirq &(&bat_priv->tt.work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 nfc_devlist_mutex &sem->wait_lock irq_context: 0 nfc_devlist_mutex &p->pi_lock irq_context: 0 nfc_devlist_mutex &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex 
&root->kernfs_rwsem &sem->wait_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &new_node->seq_out_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock pool_lock#2 irq_context: 0 &sighand->siglock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex stock_lock irq_context: 0 &xt[i].mutex key irq_context: 0 &f->f_pos_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 sb_writers#3 sb_internal &rcu_state.expedited_wq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &n->list_lock irq_context: 0 &dev->mutex uevent_sock_mutex &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &n->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex &n->list_lock &c->lock irq_context: 0 misc_mtx &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 misc_mtx &dev->mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rcu_read_lock fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rq->__lock cpu_asid_lock irq_context: 0 &u->iolock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &n->list_lock irq_context: 0 &dev->mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh pool_lock#2 irq_context: 0 &ndev->req_lock &wq->mutex &rq->__lock irq_context: 0 &ndev->req_lock &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 kfence_freelist_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#474 irq_context: 0 nfc_devlist_mutex &rq->__lock irq_context: 0 nfc_devlist_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex batched_entropy_u8.lock irq_context: 0 nfc_devlist_mutex kfence_freelist_lock irq_context: 0 nfc_devlist_mutex &meta->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &n->list_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &c->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &ei->i_es_lock key#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &____s->seqcount irq_context: 0 &vma->vm_lock->lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex 
&n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &____s->seqcount#2 irq_context: 0 &vma->vm_lock->lock &n->list_lock irq_context: 0 &vma->vm_lock->lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 misc_mtx &dev->mutex &pcp->lock &zone->lock irq_context: 0 &p->lock remove_cache_srcu &rq->__lock irq_context: 0 &p->lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &____s->seqcount#2 irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &mapping->i_private_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_node_0 irq_context: 0 &dev->mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 &dev->mutex uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &kernfs_locks->open_file_mutex[count] fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override pool_lock irq_context: 0 &group->mark_mutex remove_cache_srcu irq_context: 0 &group->mark_mutex remove_cache_srcu quarantine_lock irq_context: 0 &group->mark_mutex remove_cache_srcu &c->lock irq_context: 0 &group->mark_mutex remove_cache_srcu &n->list_lock irq_context: 0 &group->mark_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &group->mark_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &group->mark_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&peer->timer_persistent_keepalive) &c->lock irq_context: softirq (&ndev->rs_timer) &ndev->lock batched_entropy_u32.lock crngs.lock irq_context: softirq &(&bat_priv->tt.work)->timer irq_context: softirq &(&bat_priv->tt.work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&bat_priv->tt.work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) key#16 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) key#21 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &bat_priv->tt.req_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &bat_priv->tt.roam_list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &base->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &ndev->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex quarantine_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &n->list_lock irq_context: 
0 misc_mtx cpu_hotplug_lock wq_pool_mutex &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &ei->i_es_lock key#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_es_lock key#5 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ebt_mutex &mm->mmap_lock &rq->__lock irq_context: 0 ebt_mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ebt_mutex &rq->__lock irq_context: 0 ebt_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex rcu_read_lock rcu_node_0 irq_context: 0 &xt[i].mutex rcu_read_lock &rq->__lock irq_context: 0 &xt[i].mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &fq->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->queue_stop_reason_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &list->lock#16 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &list->lock#16 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &n->list_lock irq_context: 0 &xt[i].mutex fs_reclaim &rq->__lock irq_context: 0 &xt[i].mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock &rq->__lock 
irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &n->list_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &c->lock irq_context: 0 sched_map-wait-type-override &rq->__lock irq_context: 0 sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &cfs_rq->removed.lock irq_context: 0 sb_writers#3 remove_cache_srcu pool_lock#2 irq_context: 0 &mm->mmap_lock rcu_read_lock stock_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle key#4 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: softirq &(&bat_priv->tt.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&bat_priv->tt.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 pool_lock#2 irq_context: 0 &xt[i].mutex remove_cache_srcu &c->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &obj_hash[i].lock irq_context: 0 tomoyo_ss remove_cache_srcu 
&cfs_rq->removed.lock irq_context: 0 tomoyo_ss remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#7 kn->active#4 &____s->seqcount#2 irq_context: 0 sb_writers#7 kn->active#4 &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_lock_key &xa->xa_lock#9 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: softirq (&ndev->rs_timer) &n->list_lock irq_context: 0 nfc_devlist_mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 &xt[i].mutex pcpu_lock irq_context: 0 &xt[i].mutex percpu_counters_lock irq_context: 0 &xt[i].mutex pcpu_lock stock_lock irq_context: 0 &xt[i].mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 &xa->xa_lock#5 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 &xa->xa_lock#5 &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 &xa->xa_lock#5 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &xa->xa_lock#9 &c->lock irq_context: 0 &xt[i].mutex &lock->wait_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &pcp->lock &zone->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &f->f_pos_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 &____s->seqcount#2 irq_context: 0 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock 
?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &meta->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 kn->active#4 &n->list_lock irq_context: 0 sb_writers#7 kn->active#4 &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &ul->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 tomoyo_ss &____s->seqcount#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf 
(work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock crngs.lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &meta->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) &base->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 misc_mtx fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) &base->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) 
&base->lock &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 sb_internal &____s->seqcount#2 irq_context: 0 sb_writers#3 sb_internal &pcp->lock &zone->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#7 tomoyo_ss quarantine_lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#7 kn->active#4 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 fs_reclaim &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &n->list_lock &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock 
&nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &meta->lock irq_context: 0 &xt[i].mutex remove_cache_srcu irq_context: 0 &xt[i].mutex remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &mapping->i_private_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &n->list_lock &c->lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &rq->__lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &c->lock irq_context: 0 remove_cache_srcu pool_lock#2 irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#14 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu pool_lock#2 irq_context: 0 &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 nfc_devlist_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &sem->wait_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &p->pi_lock irq_context: 0 sb_writers#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock 
rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &fq->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->active_txq_lock[i] irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->queue_stop_reason_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &list->lock#16 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &list->lock#16 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex &n->list_lock irq_context: 0 misc_mtx &dev->mutex &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 percpu_counters_lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &child->perf_event_mutex &rq->__lock irq_context: 0 &child->perf_event_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &n->list_lock irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock pool_lock#2 irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &n->lock irq_context: 0 sb_writers#3 jbd2_handle &obj_hash[i].lock irq_context: 0 &dev->mutex &____s->seqcount#2 irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 nfc_devlist_mutex &n->list_lock irq_context: 0 nfc_devlist_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &pcp->lock 
&zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock remove_cache_srcu pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock quarantine_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_node_0 irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex freezer_mutex.wait_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock rcu_node_0 irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex.wait_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#5 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macsec_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &base->lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 &fsnotify_mark_srcu &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex &sem->wait_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock quarantine_lock irq_context: 0 
&sig->cred_guard_mutex sb_writers#3 &____s->seqcount#2 irq_context: 0 misc_mtx remove_cache_srcu irq_context: 0 misc_mtx remove_cache_srcu quarantine_lock irq_context: 0 misc_mtx remove_cache_srcu &c->lock irq_context: 0 misc_mtx remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx remove_cache_srcu &obj_hash[i].lock irq_context: 0 misc_mtx remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 misc_mtx remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 pool_lock#2 irq_context: 0 sb_writers#3 sb_internal &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 sb_internal &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 &dev->mutex &sem->wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex &obj_hash[i].lock irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock &c->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_node_0 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &rq->__lock irq_context: 0 sb_writers#4 oom_adj_mutex &rq->__lock irq_context: 0 sb_writers#4 oom_adj_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#5 &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#5 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 rcu_read_lock &ndev->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 &obj_hash[i].lock irq_context: 0 &xt[i].mutex remove_cache_srcu &n->list_lock irq_context: 0 &xt[i].mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &xt[i].mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &xt[i].mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 &____s->seqcount#2 irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &obj_hash[i].lock irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock pool_lock#2 irq_context: 0 kn->active#4 remove_cache_srcu pool_lock#2 irq_context: 0 &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle quarantine_lock irq_context: 0 &mm->mmap_lock &____s->seqcount#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &group->mark_mutex &fsnotify_mark_srcu &rq->__lock irq_context: 0 &group->mark_mutex &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &group->mark_mutex &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &ep->mtx &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock 
&mm->mmap_lock/1 rcu_node_0 irq_context: 0 misc_mtx fs_reclaim &rq->__lock irq_context: 0 misc_mtx fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: softirq (&ndev->rs_timer) &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock lweventlist_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock irq_context: 0 &p->lock &of->mutex &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu quarantine_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &____s->seqcount#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)rcu_gp &rq->__lock irq_context: 0 (wq_completion)rcu_gp &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle 
&bgl->locks[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &____s->seqcount irq_context: 0 &vma->vm_lock->lock &____s->seqcount#2 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &____s->seqcount#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq irq_context: 0 &wq->mutex &rq->__lock irq_context: 0 &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &mapping->i_private_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock 
&____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &____s->seqcount irq_context: 0 sb_writers#3 sb_internal &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 sb_internal &____s->seqcount irq_context: 0 (wq_completion)events_unbound (reaper_work).work &obj_hash[i].lock pool_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &u->iolock &mm->mmap_lock &rq->__lock irq_context: 0 &u->iolock &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 &rq->__lock cpu_asid_lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &bgl->locks[i].lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#579 &rq->__lock irq_context: 0 misc_mtx &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
&type->i_mutex_dir_key#3 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound connector_reaper_work &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &r->producer_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &n->list_lock &c->lock irq_context: 0 cgroup_threadgroup_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tsk->futex_exit_mutex &rq->__lock irq_context: 0 &tsk->futex_exit_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &pcp->lock &zone->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &sem->wait_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal &rq->__lock irq_context: 0 sb_writers#3 sb_internal 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &root->kernfs_iattr_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock stock_lock irq_context: 0 rcu_read_lock pcpu_lock stock_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &pcp->lock &zone->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &n->list_lock &c->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh &r->producer_lock#3 irq_context: 0 (wq_completion)bat_events &rq->__lock irq_context: 0 (wq_completion)bat_events &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 &rq->__lock irq_context: softirq (&peer->timer_retransmit_handshake) irq_context: softirq (&peer->timer_retransmit_handshake) &peer->endpoint_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex &cfs_rq->removed.lock irq_context: 0 &disk->open_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock rcu_read_lock &ndev->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock 
rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &r->producer_lock#3 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle batched_entropy_u8.lock irq_context: 0 &p->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#7 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 rcu_node_0 irq_context: 0 file_rwsem &rq->__lock irq_context: 0 lock pidmap_lock &____s->seqcount#2 irq_context: 0 lock pidmap_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu pool_lock#2 irq_context: 0 &sig->cred_guard_mutex fs_reclaim &rq->__lock irq_context: 0 &sig->cred_guard_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 l2tp_ip6_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 
tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock krc.lock &base->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &table->lock#3 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh pool_lock#2 irq_context: 0 &mm->mmap_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1152 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1152 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1152 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1153 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1153 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1153 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock stock_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock key irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_node_0 irq_context: 0 &hdev->req_lock &hdev->lock &____s->seqcount#2 irq_context: 0 &hdev->req_lock &hdev->lock &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock &c->lock irq_context: 0 &hdev->req_lock &hdev->lock quarantine_lock irq_context: 0 (wq_completion)bat_events 
(work_completion)(&(&forw_packet_aggr->delayed_work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1153 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rq->__lock irq_context: 0 &group->mark_mutex fs_reclaim &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock quarantine_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1153 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1153 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1154 irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem &bgl->locks[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &new_node->seq_out_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock pool_lock#2 irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 key#22 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 &root->kernfs_iattr_rwsem pool_lock#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle 
remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &c->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_state_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start stock_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start key irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &c->lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->softif_vlan_list_lock &n->list_lock &c->lock irq_context: 0 &pipe->mutex/1 fs_reclaim &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &rq->__lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &cfs_rq->removed.lock irq_context: 
softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock key irq_context: 0 &mm->mmap_lock pcpu_lock irq_context: 0 &mm->mmap_lock percpu_counters_lock irq_context: 0 &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &____s->seqcount irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &rq->__lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_retransmit_handshake) &peer->endpoint_lock &obj_hash[i].lock irq_context: softirq (&peer->timer_retransmit_handshake) &peer->endpoint_lock pool_lock#2 irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock 
rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1154 &rq->__lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &fq->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->active_txq_lock[i] irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->active_txq_lock[i] irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &local->queue_stop_reason_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &fq->lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock tk_core.seq.seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &list->lock#16 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock &list->lock#16 irq_context: 0 &hdev->lock &c->lock irq_context: 0 uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uevent_sock_mutex &n->list_lock irq_context: 0 uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &mm->mmap_lock &rq->__lock irq_context: 0 &f->f_pos_lock &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal &n->list_lock irq_context: 0 sb_writers#3 sb_internal &n->list_lock &c->lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &n->list_lock &c->lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu quarantine_lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &c->lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &n->list_lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &obj_hash[i].lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &rq->__lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock &pcp->lock &zone->lock irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu pool_lock#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &journal->j_state_lock 
&journal->j_wait_transaction_locked irq_context: 0 sb_writers#3 jbd2_handle irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock smack_known_lock.wait_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock.wait_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle irq_context: 0 &journal->j_state_lock &journal->j_wait_transaction_locked &p->pi_lock irq_context: 0 &journal->j_state_lock &journal->j_wait_transaction_locked &p->pi_lock &rq->__lock irq_context: 0 &journal->j_state_lock &journal->j_wait_transaction_locked &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle bit_wait_table + i irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem &rq->__lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem pool_lock#2 irq_context: 0 &ret->b_state_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 file_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1154 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#481 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#481 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#475 irq_context: softirq rcu_read_lock &br->hash_lock &n->list_lock irq_context: softirq rcu_read_lock &br->hash_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#473 irq_context: 0 &xt[i].mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (shepherd).work cpu_hotplug_lock &rq->__lock irq_context: 0 (wq_completion)events 
(shepherd).work cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1154 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1154 &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu quarantine_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &n->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &meta->lock irq_context: 0 sb_writers#7 kn->active#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu pool_lock#2 irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock percpu_counters_lock irq_context: 0 lock pidmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.gp_wq irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &n->list_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &sk->sk_lock.wq irq_context: 0 slock-AF_INET#2 &sk->sk_lock.wq irq_context: 0 slock-AF_INET#2 &sk->sk_lock.wq &p->pi_lock 
irq_context: 0 slock-AF_INET#2 &sk->sk_lock.wq &p->pi_lock &rq->__lock irq_context: 0 slock-AF_INET#2 &sk->sk_lock.wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &ei->i_raw_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rq->__lock cpu_asid_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu pool_lock#2 irq_context: softirq rcu_read_lock &local->rx_path_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock remove_cache_srcu &cfs_rq->removed.lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq &(&bat_priv->dat.work)->timer irq_context: softirq &(&bat_priv->dat.work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&bat_priv->dat.work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &obj_hash[i].lock irq_context: softirq &(&bat_priv->dat.work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq &(&bat_priv->dat.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&bat_priv->dat.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&bat_priv->bla.work)->timer irq_context: softirq &(&bat_priv->bla.work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&bat_priv->bla.work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &hash->list_locks[i] irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &base->lock &obj_hash[i].lock irq_context: 0 
(wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) key#20 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &entry->crc_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &list->lock#12 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) &base->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &c->lock irq_context: 0 &mm->mmap_lock sb_writers#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx pcpu_alloc_mutex &rq->__lock irq_context: 0 misc_mtx pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx pcpu_alloc_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &____s->seqcount#2 irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &pcp->lock &zone->lock irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx nfc_devlist_mutex lock kernfs_idr_lock &____s->seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sig->cred_guard_mutex batched_entropy_u8.lock irq_context: 0 &group->mark_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &group->mark_mutex &lock->wait_lock irq_context: 0 &group->mark_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem 
&n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock jump_label_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fsnotify_mark_srcu &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex &mm->mmap_lock pool_lock#2 irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex kfence_freelist_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock key irq_context: 0 &mm->mmap_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.gp_wq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 lock#4 &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
&type->i_mutex_dir_key#3 jbd2_handle rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &____s->seqcount irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 &mm->mmap_lock percpu_counters_lock irq_context: softirq &(&bat_priv->dat.work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 remove_cache_srcu pool_lock#2 irq_context: 0 &group->inotify_data.idr_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#5 quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 remove_cache_srcu fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &ei->i_raw_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex &lo->lo_mutex &rq->__lock irq_context: 0 &disk->open_mutex &lo->lo_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &disk->open_mutex &lock->wait_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_read_lock rcu_node_0 irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_read_lock &rq->__lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &n->list_lock irq_context: 0 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &n->list_lock &c->lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fsnotify_mark_srcu &n->list_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &cfs_rq->removed.lock irq_context: softirq (&n->timer) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 sb_internal quarantine_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex uevent_sock_mutex &n->list_lock irq_context: 0 &dev->mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 tomoyo_ss stock_lock irq_context: 0 sb_writers#3 sb_internal &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rq->__lock cpu_asid_lock irq_context: softirq (&n->timer) &n->lock irq_context: 0 &p->alloc_lock &x->wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &group->notification_waitq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 loop_validate_mutex &rq->__lock irq_context: 0 loop_validate_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 loop_validate_mutex loop_validate_mutex.wait_lock irq_context: 0 loop_validate_mutex.wait_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tsk->futex_exit_mutex &cfs_rq->removed.lock irq_context: 0 tomoyo_ss key irq_context: 0 tomoyo_ss pcpu_lock irq_context: 0 tomoyo_ss percpu_counters_lock irq_context: 0 tomoyo_ss pcpu_lock stock_lock irq_context: 0 &fsnotify_mark_srcu &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &meta->lock irq_context: 0 &xt[i].mutex remove_cache_srcu &rq->__lock irq_context: 0 &xt[i].mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 
sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &rq->__lock irq_context: 0 sb_writers#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&n->timer) &n->lock &obj_hash[i].lock irq_context: softirq (&n->timer) &n->lock &base->lock irq_context: softirq (&n->timer) &n->lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 remove_cache_srcu pool_lock#2 irq_context: 0 &root->kernfs_iattr_rwsem stock_lock irq_context: 0 &root->kernfs_iattr_rwsem key irq_context: 0 &root->kernfs_rwsem pool_lock#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &cfs_rq->removed.lock irq_context: 0 &dev->mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex quarantine_lock irq_context: 0 &xt[i].mutex remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &fsnotify_mark_srcu &rq->__lock irq_context: 0 sb_writers#3 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &____s->seqcount irq_context: 0 &xt[i].mutex batched_entropy_u8.lock irq_context: 0 &xt[i].mutex kfence_freelist_lock irq_context: 0 &xt[i].mutex &meta->lock irq_context: 0 nf_sockopt_mutex &rq->__lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &root->kernfs_iattr_rwsem pcpu_lock irq_context: 0 &root->kernfs_iattr_rwsem percpu_counters_lock irq_context: 0 &root->kernfs_iattr_rwsem pcpu_lock stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_sockopt_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &ACCESS_PRIVATE(ssp->srcu_sup, lock) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 remove_cache_srcu key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu irq_context: 0 &sig->cred_guard_mutex 
&cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &obj_hash[i].lock irq_context: 0 remove_cache_srcu pcpu_lock irq_context: 0 remove_cache_srcu percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ret->b_state_lock &journal->j_list_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &rq->__lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex &rq->__lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &root->kernfs_iattr_rwsem iattr_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex remove_cache_srcu quarantine_lock irq_context: 0 tomoyo_ss remove_cache_srcu key irq_context: 0 tomoyo_ss remove_cache_srcu pcpu_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu pool_lock#2 irq_context: 0 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &meta->lock irq_context: 0 nf_sockopt_mutex &cfs_rq->removed.lock irq_context: 0 nf_sockopt_mutex &obj_hash[i].lock irq_context: 0 nf_sockopt_mutex pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &meta->lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock pool_lock#2 irq_context: 0 nf_sockopt_mutex nf_sockopt_mutex.wait_lock irq_context: 0 &p->lock &of->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &____s->seqcount irq_context: 0 rtnl_mutex nf_hook_mutex &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_node_0 irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock 
kfence_freelist_lock irq_context: softirq (&ndev->rs_timer) &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1154 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx batched_entropy_u8.lock crngs.lock irq_context: 0 key#22 irq_context: 0 misc_mtx nfc_devlist_mutex quarantine_lock irq_context: 0 misc_mtx remove_cache_srcu pool_lock#2 irq_context: 0 nf_sockopt_mutex.wait_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &pcp->lock &zone->lock irq_context: softirq (&peer->timer_send_keepalive) irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 sb_writers#5 rcu_node_0 irq_context: 0 sb_writers#5 rcu_read_lock rcu_node_0 irq_context: 0 nfc_devlist_mutex remove_cache_srcu &c->lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu &n->list_lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 nfc_devlist_mutex remove_cache_srcu &rq->__lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu pool_lock#2 irq_context: 0 tomoyo_ss remove_cache_srcu percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem pcpu_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 sb_writers#7 tomoyo_ss &cfs_rq->removed.lock irq_context: softirq rcu_callback rlock-AF_RXRPC irq_context: 
0 sb_writers#5 &sb->s_type->i_mutex_key#13 &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &cfs_rq->removed.lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &rq->__lock cpu_asid_lock irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock nsim_bus_dev_ids.xa_lock irq_context: 0 wq_pool_attach_mutex pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &simple_offset_xa_lock &____s->seqcount irq_context: softirq (&peer->timer_send_keepalive) pool_lock#2 irq_context: softirq (&peer->timer_send_keepalive) &c->lock irq_context: softirq (&peer->timer_send_keepalive) &list->lock#14 irq_context: softirq (&peer->timer_send_keepalive) tk_core.seq.seqcount irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh &r->producer_lock#2 irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock 
irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh &obj_hash[i].lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh &base->lock irq_context: softirq (&peer->timer_send_keepalive) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 (work_completion)(&data->suspend_work) irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 ebt_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 krc.lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex.wait_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 krc.lock &base->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_cb_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &rq->__lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 vlan_ioctl_mutex rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 nfc_devlist_mutex.wait_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 tomoyo_ss 
remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 tomoyo_ss remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx key irq_context: 0 misc_mtx pcpu_lock irq_context: 0 misc_mtx percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu quarantine_lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &n->list_lock irq_context: 0 sb_writers#3 quarantine_lock irq_context: 0 &sig->cred_guard_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &sighand->siglock stock_lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount#2 irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) &meta->lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &n->list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &obj_hash[i].lock 
irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock &meta->lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock kfence_freelist_lock irq_context: 0 &hdev->unregister_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 nfc_devlist_mutex nfc_devlist_mutex.wait_lock irq_context: 0 misc_mtx nfc_devlist_mutex.wait_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 irq_context: 0 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock rcu_read_lock &p->pi_lock irq_context: 0 &xa->xa_lock#17 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_node_0 irq_context: 0 rtnl_mutex &idev->mc_query_lock irq_context: 0 tomoyo_ss &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) 
&type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_es_lock key#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock smack_known_lock.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &cfs_rq->removed.lock irq_context: softirq &(&bat_priv->tt.work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex raw_notifier_lock irq_context: 0 rtnl_mutex &xa->xa_lock#4 &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 sb_writers#7 tomoyo_ss pool_lock#2 irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 sb_writers#7 kn->active#4 &kernfs_locks->open_file_mutex[count] &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1154 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &xa->xa_lock#4 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &x->wait#11 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock &c->lock irq_context: 0 tomoyo_ss rename_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 misc_mtx rcu_read_lock &rcu_state.gp_wq irq_context: 0 rtnl_mutex rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 krc.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock pool_lock#2 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &pcp->lock &zone->lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx batched_entropy_u8.lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &x->wait#3 irq_context: 0 &rxnet->local_mutex irq_context: 0 sb_writers#3 sb_internal jbd2_handle &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &tn->nametbl_lock irq_context: 0 rtnl_mutex &tbl->lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 kn->active#16 &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_base_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex subsys mutex#20 &rq->__lock irq_context: 0 fill_pool_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: softirq rcu_callback put_task_map-wait-type-override &meta->lock irq_context: softirq rcu_callback put_task_map-wait-type-override kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem 
&p->pi_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 sk_lock-AF_INET fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sk_lock-AF_INET fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &n->list_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&hsr->announce_timer) rcu_read_lock &____s->seqcount#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex key#23 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &n->list_lock &c->lock irq_context: 0 &dev->mutex rfkill_global_mutex rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1167 &rq->__lock irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_persistent_keepalive) &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock rcu_node_0 irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 
cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_TUNNEL#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &base->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock &c->lock irq_context: 0 &dev->mutex &root->kernfs_rwsem quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1167 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_node_0 irq_context: 0 sb_writers#3 sb_internal batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_IPGRE#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_IPGRE irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &base->lock irq_context: 0 &xt[i].mutex purge_vmap_area_lock quarantine_lock irq_context: 0 &sighand->siglock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &fn->fou_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER krc.lock &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu quarantine_lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu pool_lock#2 irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock &n->list_lock irq_context: 0 &group->mark_mutex lock &group->inotify_data.idr_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &folio_wait_table[i] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 rcu_read_lock 
rcu_read_lock &sighand->siglock &p->pi_lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &kcov->lock kcov_remote_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu irq_context: 0 pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (&ifibss->timer) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 tasklist_lock rcu_read_lock &sighand->siglock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss tomoyo_policy_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss tomoyo_policy_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&p->wq) kfence_freelist_lock irq_context: 0 &dentry->d_lock &sb->s_type->i_lock_key#22 &lru->node[i].lock irq_context: 0 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_internal irq_context: 0 sb_internal jbd2_handle &ei->i_raw_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &sbi->s_md_lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_internal jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_internal jbd2_handle &sbi->s_orphan_lock &ei->i_raw_lock irq_context: 0 sb_internal jbd2_handle &xa->xa_lock#9 irq_context: 0 sb_internal jbd2_handle &ei->i_es_lock irq_context: 0 inode_hash_lock &sb->s_type->i_lock_key#22 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &n->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) pool_lock#2 irq_context: 0 &type->s_umount_key#47 irq_context: 0 &type->s_umount_key#47 &x->wait#23 irq_context: 0 &type->s_umount_key#47 shrinker_mutex irq_context: 0 &type->s_umount_key#47 &obj_hash[i].lock irq_context: 0 &type->s_umount_key#47 pool_lock#2 irq_context: 0 &type->s_umount_key#47 rename_lock.seqcount irq_context: 0 &type->s_umount_key#47 &dentry->d_lock irq_context: 0 &type->s_umount_key#47 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &type->s_umount_key#47 &dentry->d_lock/1 irq_context: 0 &type->s_umount_key#47 rcu_read_lock &dentry->d_lock irq_context: 0 &type->s_umount_key#47 &sb->s_type->i_lock_key#32 irq_context: 0 &type->s_umount_key#47 &s->s_inode_list_lock irq_context: 0 &type->s_umount_key#47 &xa->xa_lock#9 irq_context: 0 &type->s_umount_key#47 &fsnotify_mark_srcu irq_context: 0 &type->s_umount_key#47 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &type->s_umount_key#47 &dentry->d_lock pool_lock#2 irq_context: 0 &type->s_umount_key#47 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 &type->s_umount_key#47 binderfs_minors_mutex irq_context: 0 &type->s_umount_key#47 binderfs_minors_mutex binderfs_minors.xa_lock irq_context: 0 &mq_lock irq_context: 0 (wq_completion)events free_ipc_work irq_context: 0 (wq_completion)events free_ipc_work &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work &x->wait#3 irq_context: 0 (wq_completion)events free_ipc_work sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events free_ipc_work &rq->__lock irq_context: 0 (wq_completion)events free_ipc_work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kcov_remote_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &in_dev->mc_tomb_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex irq_context: 0 (wq_completion)events drain_vmap_work 
vmap_purge_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&s->destroy_work) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock mount_lock irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)events free_ipc_work mount_lock irq_context: 0 (wq_completion)events free_ipc_work mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)events free_ipc_work &fsnotify_mark_srcu irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &x->wait#23 irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 shrinker_mutex irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 rename_lock.seqcount irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &dentry->d_lock irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &sb->s_type->i_lock_key#19 irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &s->s_inode_list_lock irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &xa->xa_lock#9 irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &fsnotify_mark_srcu irq_context: 0 (wq_completion)events free_ipc_work &type->s_umount_key#48 &dentry->d_lock/1 irq_context: 0 (wq_completion)events free_ipc_work sb_lock irq_context: 0 (wq_completion)events free_ipc_work unnamed_dev_ida.xa_lock irq_context: 0 (wq_completion)events free_ipc_work list_lrus_mutex irq_context: 0 (wq_completion)events free_ipc_work &xa->xa_lock#5 irq_context: 0 (wq_completion)events free_ipc_work pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work sb_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work sb_lock pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work mnt_id_ida.xa_lock irq_context: 0 (wq_completion)events free_ipc_work &ids->rwsem irq_context: 0 (wq_completion)events free_ipc_work (work_completion)(&ht->run_work) irq_context: 0 (wq_completion)events free_ipc_work &ht->mutex irq_context: 0 (wq_completion)events free_ipc_work &ht->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work &ht->mutex pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work percpu_counters_lock irq_context: 0 (wq_completion)events free_ipc_work pcpu_lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock krc.lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events 
free_ipc_work sysctl_lock krc.lock &base->lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock sysctl_lock irq_context: 0 (wq_completion)events free_ipc_work rcu_read_lock &sb->s_type->i_lock_key#23 irq_context: 0 (wq_completion)events free_ipc_work &sb->s_type->i_lock_key#23 irq_context: 0 (wq_completion)events free_ipc_work &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock irq_context: 0 (wq_completion)events free_ipc_work rename_lock.seqcount irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock &sb->s_type->i_lock_key#23 &lru->node[i].lock irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work &s->s_inode_list_lock irq_context: 0 (wq_completion)events free_ipc_work &xa->xa_lock#9 irq_context: 0 (wq_completion)events free_ipc_work proc_inum_ida.xa_lock irq_context: 0 sb_writers#4 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#4 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle key#4 irq_context: 0 sk_lock-AF_INET6 &rq->__lock irq_context: 0 sk_lock-AF_INET6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 hci_dev_list_lock irq_context: 0 (work_completion)(&hdev->power_on) irq_context: 0 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (work_completion)(&hdev->reenable_adv_work) irq_context: 0 &hdev->cmd_sync_work_lock irq_context: 0 &hdev->req_lock (work_completion)(&(&hdev->interleave_scan)->work) irq_context: 0 &hdev->req_lock hci_dev_list_lock irq_context: 0 &hdev->req_lock (work_completion)(&hdev->tx_work) irq_context: 0 &hdev->req_lock (work_completion)(&hdev->rx_work) irq_context: 0 &hdev->req_lock &wq->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu pool_lock#2 irq_context: 0 &hdev->req_lock &wq->mutex &pool->lock irq_context: 0 &hdev->req_lock &wq->mutex &x->wait#10 irq_context: 0 &hdev->req_lock &hdev->lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->id_addr_timer)->work) irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &x->wait#3 irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock pool_lock#2 irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->info_timer)->work) irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 &hdev->req_lock &hdev->lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock &base->lock irq_context: 0 &hdev->req_lock &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock (work_completion)(&(&conn->disc_work)->work) irq_context: 0 &hdev->req_lock &hdev->lock (work_completion)(&(&conn->auto_accept_work)->work) irq_context: 0 &hdev->req_lock &hdev->lock (work_completion)(&(&conn->idle_work)->work) irq_context: 0 &hdev->req_lock &hdev->lock &x->wait#3 irq_context: 0 &hdev->req_lock &hdev->lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tasklist_lock stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &hdev->req_lock &hdev->lock &k->k_lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem pool_lock#2 irq_context: 0 &hdev->req_lock &hdev->lock kernfs_idr_lock irq_context: 0 &hdev->req_lock &hdev->lock pool_lock#2 irq_context: 0 &hdev->req_lock &hdev->lock &k->k_lock klist_remove_lock irq_context: 0 &hdev->req_lock &hdev->lock &k->list_lock irq_context: 0 &hdev->req_lock &hdev->lock sysfs_symlink_target_lock irq_context: 0 &hdev->req_lock &hdev->lock subsys mutex#74 irq_context: 0 &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock klist_remove_lock irq_context: 0 &hdev->req_lock &hdev->lock &x->wait#9 irq_context: 0 &hdev->req_lock &hdev->lock dpm_list_mtx irq_context: 0 &hdev->req_lock &hdev->lock &dev->power.lock irq_context: 0 &hdev->req_lock &hdev->lock deferred_probe_mutex irq_context: 0 &hdev->req_lock &hdev->lock device_links_lock irq_context: 0 &hdev->req_lock &hdev->lock mmu_notifier_invalidate_range_start irq_context: 0 &hdev->req_lock &hdev->lock &c->lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &c->lock 
irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex pool_lock#2 irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem &c->lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &c->lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#5 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1167 irq_context: 0 &p->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &p->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem stock_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem key irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem pcpu_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem percpu_counters_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc28_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1154 irq_context: 0 fill_pool_map-wait-type-override rcu_node_0 irq_context: 0 fill_pool_map-wait-type-override &rcu_state.expedited_wq irq_context: 0 fill_pool_map-wait-type-override 
&rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock kfence_freelist_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[3] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#6 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss pcpu_lock stock_lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 rtnl_mutex stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 __ip_vs_mutex rcu_read_lock &rq->__lock irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock &meta->lock irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock kfence_freelist_lock irq_context: 0 __ip_vs_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex __ip_vs_mutex __ip_vs_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &batadv_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex deferred_probe_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#981 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#479 irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock 
&dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &n->list_lock irq_context: 0 &sfilter->notify_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#427 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock krc.lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1167 irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 namespace_sem &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#482 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1146 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1146 irq_context: 0 &pipe->mutex/1 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) 
batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#141 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#139 irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)events (work_completion)(&w->w) quarantine_lock irq_context: 0 &sighand->siglock batched_entropy_u8.lock irq_context: 0 (wq_completion)events (debug_obj_work).work &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 kn->active#52 remove_cache_srcu irq_context: 0 kn->active#52 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#52 remove_cache_srcu &c->lock irq_context: 0 kn->active#52 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#52 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sched_map-wait-type-override stock_lock irq_context: 0 sched_map-wait-type-override key irq_context: 0 sched_map-wait-type-override pcpu_lock irq_context: 0 sched_map-wait-type-override percpu_counters_lock irq_context: 0 sched_map-wait-type-override pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1168 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1168 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1168 irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#142 irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ 
unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#139 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
&wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 kn->active#52 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1196 irq_context: 0 &hdev->req_lock tk_core.seq.seqcount irq_context: 0 &hdev->req_lock hci_sk_list.lock irq_context: 0 &hdev->req_lock &list->lock#5 irq_context: 0 &hdev->req_lock (work_completion)(&hdev->cmd_work) irq_context: 0 &hdev->req_lock (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 &hdev->lock irq_context: 0 &hdev->lock fs_reclaim irq_context: 0 &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &hdev->lock pool_lock#2 irq_context: 0 &hdev->lock tk_core.seq.seqcount irq_context: 0 &hdev->lock hci_sk_list.lock irq_context: 0 &hdev->lock &obj_hash[i].lock irq_context: 0 &hdev->lock &pcp->lock &zone->lock irq_context: 0 &hdev->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &hdev->lock &____s->seqcount irq_context: 0 hci_sk_list.lock irq_context: 0 (work_completion)(&rfkill->uevent_work) irq_context: 0 (work_completion)(&rfkill->sync_work) irq_context: 0 subsys mutex#40 irq_context: 0 subsys mutex#40 &k->k_lock irq_context: 0 subsys mutex#40 &k->k_lock klist_remove_lock irq_context: 0 &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &rfkill->lock irq_context: 0 uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 uevent_sock_mutex nl_table_lock irq_context: 0 uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait 
irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uevent_sock_mutex nl_table_wait.lock irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 dev_pm_qos_sysfs_mtx &rq->__lock irq_context: 0 dev_pm_qos_sysfs_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 subsys mutex#74 irq_context: 0 subsys mutex#74 &k->k_lock irq_context: 0 subsys mutex#74 &k->k_lock klist_remove_lock irq_context: 0 &sb->s_type->i_mutex_key#3 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &sb->s_type->i_mutex_key#3 pin_fs_lock irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#3 mount_lock irq_context: 0 &sb->s_type->i_mutex_key#3 mount_lock mount_lock.seqcount irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu irq_context: 0 &sb->s_type->i_mutex_key#3 &xa->xa_lock#9 irq_context: 0 &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#3 &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 hci_index_ida.xa_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[3] &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &tun->lock irq_context: 0 rtnl_mutex &dev->tx_global_lock _xmit_NETROM irq_context: 0 rtnl_mutex dev->qdisc_tx_busylock ?: &qdisc_tx_busylock irq_context: 0 rtnl_mutex dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &sch->q.lock irq_context: 0 rtnl_mutex __ip_vs_mutex irq_context: 0 rtnl_mutex __ip_vs_mutex &ipvs->dest_trash_lock irq_context: 0 rtnl_mutex _xmit_ETHER &obj_hash[i].lock irq_context: 0 rtnl_mutex _xmit_ETHER krc.lock irq_context: 0 rtnl_mutex flowtable_lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &nf_conntrack_locks[i] irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock &data->fib_event_queue_lock 
irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_wait.lock irq_context: 0 rtnl_mutex &tbl->lock pool_lock#2 irq_context: 0 rtnl_mutex &tbl->lock &n->lock irq_context: 0 rtnl_mutex &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 rtnl_mutex &tbl->lock nl_table_lock irq_context: 0 rtnl_mutex &tbl->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &tbl->lock nl_table_wait.lock irq_context: 0 rtnl_mutex &tbl->lock rcu_read_lock lock#8 irq_context: 0 rtnl_mutex &tbl->lock rcu_read_lock id_table_lock irq_context: 0 rtnl_mutex &tbl->lock &dir->lock#2 irq_context: 0 rtnl_mutex &tbl->lock krc.lock irq_context: 0 rtnl_mutex &net->ipv6.addrconf_hash_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER krc.lock irq_context: 0 &type->i_mutex_dir_key#4 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback &c->lock irq_context: softirq rcu_callback &n->list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &rq->__lock irq_context: softirq rcu_callback &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &base->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &base->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 key#12 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &bgl->locks[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) 
&type->s_umount_key#40 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &____s->seqcount#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ebt_mutex ebt_mutex.wait_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &____s->seqcount#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &sbi->s_md_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem lock#4 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &sbi->s_md_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem key#3 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &(ei->i_block_reservation_lock) key#15 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem 
jbd2_handle rcu_read_lock &xa->xa_lock#9 key#11 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock key#11 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock nl_table_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rlock-AF_NETLINK irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock nl_table_wait.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock lock#8 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock id_table_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock krc.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock 
rcu_read_lock &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock tk_core.seq.seqcount irq_context: 0 rtnl_mutex &idev->mc_query_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex (work_completion)(&(&idev->mc_report_work)->work) irq_context: 0 rtnl_mutex dev_base_lock &xa->xa_lock#4 irq_context: 0 rtnl_mutex cpu_hotplug_lock &list->lock#12 irq_context: 0 rtnl_mutex bpf_devs_lock irq_context: 0 rtnl_mutex &hwstats->hwsdev_list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &tbl->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem class irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem (&tbl->proxy_timer) irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &base->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem krc.lock irq_context: 0 rtnl_mutex &ul->lock irq_context: 0 rtnl_mutex &net->xdp.lock irq_context: 0 rtnl_mutex mirred_list_lock irq_context: 0 rtnl_mutex &ul->lock#2 irq_context: 0 rtnl_mutex &ul->lock#2 pool_lock#2 irq_context: 0 rtnl_mutex &ul->lock#2 &dir->lock#2 irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock krc.lock irq_context: 0 rtnl_mutex &idev->mc_report_lock irq_context: 0 rtnl_mutex rcu_read_lock sysctl_lock irq_context: 0 rtnl_mutex rcu_read_lock &sb->s_type->i_lock_key#23 irq_context: 0 rtnl_mutex &sb->s_type->i_lock_key#23 irq_context: 0 rtnl_mutex &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 rtnl_mutex &dentry->d_lock irq_context: 0 rtnl_mutex rename_lock.seqcount irq_context: 0 rtnl_mutex rcu_read_lock &dentry->d_lock irq_context: 0 rtnl_mutex &dentry->d_lock &sb->s_type->i_lock_key#23 &lru->node[i].lock irq_context: 0 rtnl_mutex &dentry->d_lock &dentry->d_lock/1 irq_context: 0 rtnl_mutex &dentry->d_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &dentry->d_lock pool_lock#2 irq_context: 0 rtnl_mutex &s->s_inode_list_lock irq_context: 0 rtnl_mutex &xa->xa_lock#9 irq_context: 0 rtnl_mutex &fsnotify_mark_srcu irq_context: 0 rtnl_mutex &dentry->d_lock &lru->node[i].lock irq_context: 0 rtnl_mutex &pnn->pndevs.lock irq_context: 0 rtnl_mutex &pnn->routes.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &vlan_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 krc.lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &n->list_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) 
&type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle tk_core.seq.seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &rq_wait->wait irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &__ctx->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_node_0 irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&hctx->run_work)->work) rcu_read_lock &n->list_lock &c->lock irq_context: hardirq rcu_read_lock &xa->xa_lock#9 key#13 irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) 
&type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &retval->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &nvmeq->sq_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &____s->seqcount irq_context: hardirq &rq_wait->wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &retval->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &nvmeq->sq_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &rq_wait->wait irq_context: 0 rtnl_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 &root->kernfs_rwsem key irq_context: 0 &root->kernfs_rwsem pcpu_lock irq_context: 0 &root->kernfs_rwsem percpu_counters_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &root->kernfs_rwsem &____s->seqcount irq_context: 0 rtnl_mutex subsys mutex#20 &k->k_lock klist_remove_lock irq_context: 0 rtnl_mutex deferred_probe_mutex irq_context: 0 rtnl_mutex device_links_lock irq_context: 0 rtnl_mutex uevent_sock_mutex mmu_notifier_invalidate_range_start 
irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &pcp->lock &zone->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) batched_entropy_u8.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: softirq rcu_callback &x->wait#24 &p->pi_lock &rq->__lock irq_context: softirq rcu_callback &x->wait#24 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 dev_base_lock irq_context: 0 lweventlist_lock irq_context: 0 &tun->lock irq_context: 0 &dir->lock#2 irq_context: 0 &dir->lock#2 &obj_hash[i].lock irq_context: 0 &dir->lock#2 pool_lock#2 irq_context: 0 netdev_unregistering_wq.lock irq_context: 0 &sb->s_type->i_mutex_key#10 &dir->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 &dir->lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns irq_context: 0 (wq_completion)netns net_cleanup_work irq_context: 0 css_set_lock cgroup_file_kn_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem net_rwsem irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock 
&obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->nsid_lock irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rq->__lock irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock tasklist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &wg->socket_update_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&cb->timer) &rq_wait->wait irq_context: softirq (&cb->timer) &rq_wait->wait &p->pi_lock irq_context: 
softirq (&cb->timer) &rq_wait->wait &p->pi_lock &rq->__lock irq_context: softirq (&cb->timer) &rq_wait->wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &n->list_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &n->list_lock &c->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 key#11 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 key#13 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &xa->xa_lock#9 key#12 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &____s->seqcount#2 irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &x->wait#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)ext4-rsv-conversion 
(work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &__ctx->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_node_0 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_callback proc_inum_ida.xa_lock irq_context: softirq rcu_callback put_task_map-wait-type-override css_set_lock irq_context: softirq rcu_callback put_task_map-wait-type-override css_set_lock &obj_hash[i].lock irq_context: softirq rcu_callback put_task_map-wait-type-override css_set_lock pool_lock#2 irq_context: softirq rcu_callback put_task_map-wait-type-override css_set_lock krc.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &rq->__lock 
irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET k-slock-AF_INET#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-slock-AF_INET#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &table->hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &table->hash[i].lock &table->hash2[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-clock-AF_INET irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &sb->s_type->i_lock_key#8 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &xa->xa_lock#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &fsnotify_mark_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-slock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock k-clock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->endpoint_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &mapping->i_mmap_rwsem &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#30 &sbi->s_writepages_rwsem jbd2_handle &mapping->i_mmap_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)ext4-rsv-conversion 
(work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_hook_mutex &n->list_lock irq_context: 0 (wq_completion)events_unbound &rq->__lock irq_context: 0 (wq_completion)events_unbound &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_node_0 irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex pool_lock#2 irq_context: 0 tomoyo_ss &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &tn->node_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ebt_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xt[i].mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &nft_net->commit_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_ct_ecache_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 tomoyo_ss &base->lock irq_context: 0 tomoyo_ss &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &n->list_lock &c->lock irq_context: 0 misc_mtx kfence_freelist_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1168 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex.wait_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &table->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock stock_lock irq_context: 0 pernet_ops_rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 pernet_ops_rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock pcpu_lock stock_lock irq_context: 0 &type->i_mutex_dir_key#2 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock pcpu_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 rcu_node_0 irq_context: 0 pernet_ops_rwsem &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem lock kernfs_idr_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem fs_reclaim &rq->__lock irq_context: 0 pernet_ops_rwsem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bcm_notifier_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex isotp_notifier_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &batadv_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &batadv_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &batadv_netdev_addr_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock 
&batadv_netdev_addr_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 krc.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pcpu_alloc_mutex rcu_node_0 irq_context: 0 pcpu_alloc_mutex &rcu_state.expedited_wq irq_context: 0 pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &bond->stats_lock/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 krc.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1197 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->keypairs.keypair_update_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &type->i_mutex_dir_key#4 &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem rtnl_mutex krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_base_lock &xa->xa_lock#4 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_base_lock &xa->xa_lock#4 pool_lock#2 irq_context: 0 put_task_map-wait-type-override#2 &obj_hash[i].lock irq_context: 0 put_task_map-wait-type-override#2 pool_lock#2 irq_context: 0 pernet_ops_rwsem remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex napi_hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex req_lock irq_context: 0 &type->i_mutex_dir_key/1 irq_context: 0 &type->i_mutex_dir_key/1 rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key/1 rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 pool_lock#2 irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss pool_lock#2 irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock &dentry->d_lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock &c->lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock pool_lock#2 irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock &pcp->lock &zone->lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss tomoyo_policy_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#75 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#75 &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem rtnl_mutex subsys mutex#75 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tap_major->minor_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&port->bc_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 quarantine_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem 
&ipvlan->addrs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &ipvlan->addrs_lock irq_context: 0 &fsnotify_mark_srcu remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#3/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nf_hook_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nf_hook_mutex cpu_hotplug_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle key#4 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&port->wq) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER/1 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER/1 &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 misc_mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&timer) irq_context: softirq (&timer) &obj_hash[i].lock irq_context: softirq (&timer) &base->lock irq_context: softirq (&timer) &base->lock &obj_hash[i].lock irq_context: softirq (&timer) rcu_read_lock pool_lock#2 irq_context: softirq (&timer) rcu_read_lock &pcp->lock &zone->lock 
irq_context: softirq (&timer) rcu_read_lock &____s->seqcount irq_context: softirq (&timer) rcu_read_lock &c->lock irq_context: softirq (&timer) rcu_read_lock &n->list_lock irq_context: softirq (&timer) rcu_read_lock &n->list_lock &c->lock irq_context: softirq (&timer) &txlock irq_context: softirq (&timer) &txlock &list->lock#3 irq_context: softirq (&timer) &txwq irq_context: softirq (&timer) &txwq &p->pi_lock irq_context: softirq (&timer) &txwq &p->pi_lock &rq->__lock irq_context: softirq (&timer) &txwq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock_bh &list->lock#12 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_SLIP#2 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_SLIP#2 &eql->queue.lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_SLIP#2 &eql->queue.lock &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_SLIP#2 &eql->queue.lock pool_lock#2 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM (console_sem).lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_lock console_srcu console_owner_lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_lock console_srcu console_owner irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_lock console_srcu console_owner &port_lock_key irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM console_lock console_srcu console_owner console_owner_lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM pool_lock#2 irq_context: 0 rcu_read_lock_bh &nr_netdev_xmit_lock_key irq_context: 0 rcu_read_lock_bh &nr_netdev_xmit_lock_key nr_node_list_lock irq_context: 0 rcu_read_lock_bh &nr_netdev_xmit_lock_key &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh &nr_netdev_xmit_lock_key pool_lock#2 irq_context: 0 rcu_read_lock_bh _xmit_X25#2 irq_context: 0 rcu_read_lock_bh _xmit_X25#2 &lapbeth->up_lock irq_context: 0 rcu_read_lock_bh _xmit_X25#2 &lapbeth->up_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh _xmit_X25#2 &lapbeth->up_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &k->k_lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work rcu_node_0 irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle batched_entropy_u32.lock 
crngs.lock base_crng.lock irq_context: softirq (&app->join_timer)#2 batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER/1 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER/1 krc.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_callback put_task_map-wait-type-override per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (&peer->timer_retransmit_handshake) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (&peer->timer_send_keepalive) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (&peer->timer_new_handshake) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (&peer->timer_zero_key_material) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (&peer->timer_persistent_keepalive) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (work_completion)(&peer->clear_peer_work) irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &wq->mutex 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &wq->mutex &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &wq->mutex &x->wait#10 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock napi_hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &table->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &list->lock#14 irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock wq_pool_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock wq_pool_mutex &wq->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_read_lock &pool->lock (worker)->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock wq_mayday_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &p->pi_lock irq_context: 0 misc_mtx cpu_hotplug_lock &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex batched_entropy_u8.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex kfence_freelist_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu pool_lock#2 irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex fs_reclaim &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem &wg->device_update_lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &x->wait irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock pcpu_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &r->consumer_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &x->wait#24 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex input_pool.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_read_lock_bh rcu_read_lock pool_lock#2 irq_context: 0 &fsnotify_mark_srcu fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rcu_node_0 irq_context: 0 pernet_ops_rwsem &rcu_state.expedited_wq irq_context: 0 pernet_ops_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &p->pi_lock irq_context: 0 
&type->i_mutex_dir_key#5 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &bond->ipsec_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex crngs.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.expedited_wq irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 uts_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &n->list_lock &c->lock irq_context: 0 sb_writers#4 &n->list_lock irq_context: 0 sb_writers#4 &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock nl_table_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock nl_table_wait.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock lock#8 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock id_table_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &dir->lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock krc.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock 
&c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock tk_core.seq.seqcount irq_context: 0 rtnl_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &rq->__lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) &pcp->lock &zone->lock &____s->seqcount irq_context: softirq (&p->forward_delay_timer) irq_context: softirq (&p->forward_delay_timer) &br->lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock &c->lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock pool_lock#2 irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock nl_table_lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock &obj_hash[i].lock irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock nl_table_wait.lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &sem->wait_lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &p->pi_lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &sb->s_type->i_mutex_key#14 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 remove_cache_srcu rcu_node_0 irq_context: 0 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem percpu_counters_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1169 irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock &base->lock irq_context: softirq (&net->sctp.addr_wq_timer) &net->sctp.addr_wq_lock &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_NETROM#2 irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu rcu_node_0 irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &n->list_lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &hsr->list_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#482 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->xfrm.xfrm_state_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock.wait_lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex lweventlist_lock &n->list_lock irq_context: 0 rtnl_mutex lweventlist_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work pool_lock irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci0#11 irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock init_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 &n->list_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem 
&anon_vma->rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1155 irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock &n->list_lock &c->lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER &c->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &wq->mutex &pool->lock &p->pi_lock irq_context: softirq rcu_read_lock &br->multicast_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock stock_lock irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock clock-AF_INET irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rq->__lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex fs_reclaim &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex fs_reclaim &obj_hash[i].lock irq_context: 0 &xt[i].mutex fs_reclaim pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock 
&mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sched_map-wait-type-override &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#52 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex pool_lock#2 irq_context: 0 rtnl_mutex remove_cache_srcu pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->forw_bcast_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->forw_bat_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->gw.list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->bat_v.ogm_wq)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->bat_v.ogm_buff_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->bat_v.ogm_buff_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->bat_v.ogm_buff_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tvlv.container_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tvlv.handler_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->nc.work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#17 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#18 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tvlv.container_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
&bat_priv->tvlv.container_list_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->dat.work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &hash->list_locks[i] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->bla.work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#20 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#20 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#20 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#20 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->mcast.work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tvlv.container_list_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->tt.work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 &bat_priv->softif_vlan_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#16 krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#21 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tt.req_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tt.changes_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tt.changes_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tt.changes_list_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &bat_priv->tt.roam_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&bat_priv->orig_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem key#19 irq_context: 0 &xt[i].mutex quarantine_lock irq_context: softirq rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem wq_mayday_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &hn->hn_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &x->wait#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)inet_frag_wq irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) (work_completion)(&ht->run_work) irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) &ht->mutex irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) &ht->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) &ht->mutex pool_lock#2 irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq (&peer->timer_persistent_keepalive) &n->list_lock irq_context: softirq (&peer->timer_persistent_keepalive) &n->list_lock &c->lock irq_context: softirq rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 kn->active#13 &n->list_lock irq_context: 0 kn->active#13 &n->list_lock &c->lock irq_context: 0 (wq_completion)events fqdir_free_work irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex &x->wait#24 irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex &rq->__lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events fqdir_free_work &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &dev->tx_global_lock _xmit_NONE#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events fqdir_free_work pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &this->info_list_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock smack_known_lock.wait_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 
&dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &pnettable->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &pnetids_ndev->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6/1 irq_context: 0 rtnl_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dentry->d_lock &sb->s_type->i_lock_key#24 &lru->node[i].lock irq_context: 0 &root->kernfs_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6/1 k-slock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6/1 rlock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6/1 &list->lock#17 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 &sctp_ep_hashtable[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock k-slock-AF_INET6/1 k-clock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex.wait_lock irq_context: 0 vlan_ioctl_mutex &p->pi_lock irq_context: 0 vlan_ioctl_mutex &p->pi_lock &rq->__lock irq_context: 0 vlan_ioctl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &ul->lock irq_context: 0 misc_mtx cpu_hotplug_lock rcu_node_0 irq_context: 0 sb_writers#3 jbd2_handle rcu_node_0 irq_context: softirq rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &pipe->mutex/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1147 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem k-slock-AF_INET6 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_INET6 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &meta_group_info[i]->alloc_sem &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &pcp->lock &zone->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem netns_bpf_mutex irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex fs_reclaim &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_node_0 irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&net->fs_probe_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->cells_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&net->cells_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem bit_wait_table + i irq_context: 0 (wq_completion)afs irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) &net->cells_lock irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) bit_wait_table + i irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) bit_wait_table + 
i &p->pi_lock irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) bit_wait_table + i &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) bit_wait_table + i &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&net->fs_timer) irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) &(&net->fs_lock)->lock irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) bit_wait_table + i irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) bit_wait_table + i &p->pi_lock irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) bit_wait_table + i &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) bit_wait_table + i &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC k-slock-AF_RXRPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rx->incoming_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->conn_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &call->waitq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC rcu_read_lock &call->notify_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC (rxrpc_call_limiter).lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rx->recvmsg_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rx->call_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->call_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC (&call->timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &list->lock#18 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_RXRPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)kafsd irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex &x->wait#10 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-clock-AF_RXRPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &local->services_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)krxrpcd irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rlock-AF_RXRPC irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem &x->wait irq_context: 0 (&local->client_conn_reap_timer) irq_context: 0 &rxnet->conn_lock irq_context: 0 &table->hash[i].lock irq_context: 0 &table->hash[i].lock &table->hash2[i].lock irq_context: 0 k-clock-AF_INET6 irq_context: 0 &list->lock#19 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_lock_key#8 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xa->xa_lock#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &fsnotify_mark_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem proc_subdir_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ent->pde_unload_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem proc_inum_ida.xa_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex (work_completion)(&data->gc_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex 
nf_hook_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex net_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&ovs_net->masks_rebalance)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&ovs_net->dp_notify_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &srv->idr_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem wq_pool_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem wq_pool_mutex &wq->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock (worker)->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC k-slock-AF_TIPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#476 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#926 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex 
&rdev->wiphy.mtx &meta->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex quarantine_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#926 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1056 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &sem->wait_lock irq_context: 0 &mm->mmap_lock &p->pi_lock irq_context: 0 &mm->mmap_lock &p->pi_lock &rq->__lock irq_context: softirq &(&gc_work->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &mm->mmap_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 pernet_ops_rwsem nf_hook_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#16 irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &cfs_rq->removed.lock irq_context: 0 cgroup_threadgroup_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] &____s->seqcount#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock stock_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &nt->cluster_scope_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC k-clock-AF_TIPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_TIPC irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ptype_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (switchdev_blocking_notif_chain).rwsem &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#496 irq_context: 0 sb_writers#4 &____s->seqcount irq_context: 0 rtnl_mutex team->team_lock_key#83 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (switchdev_blocking_notif_chain).rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 
(wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&tn->work) irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 rtnl_mutex &root->kernfs_rwsem &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 rtnl_mutex lock kernfs_idr_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex &rnp->exp_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 &p->lock &of->mutex kn->active#4 &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rnp->exp_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rnp->exp_wq[2] irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&ht->run_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&c->work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem pcpu_lock irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-clock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)krdsd irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rtn->rds_tcp_accept_w) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 &h->lhash2[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 &icsk->icsk_accept_queue.rskq_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &tcp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 &tcp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rds_tcp_conn_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem loop_conns_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)l2tp irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &x->wait#24 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem (&rxnet->peer_keepalive_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rxnet->peer_keepalive_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&rxnet->service_conn_reap_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &x->wait#10 irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->service_conn_reaper) irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->service_conn_reaper) &rxnet->conn_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rxnet->conn_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex netpoll_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pn->hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tn->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_NONE#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_ETHER#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#16 &n->list_lock &c->lock irq_context: 0 
kn->active#16 &rq->__lock irq_context: 0 kn->active#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &vma->vm_lock->lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &vma->vm_lock->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sch->q.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev->qdisc_tx_busylock ?: &qdisc_tx_busylock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &sch->q.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex __ip_vs_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex __ip_vs_mutex &ipvs->dest_trash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &im->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex class irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&tbl->proxy_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex flowtable_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock rt6_exception_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex rcu_read_lock &tb->tb6_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ndev->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ndev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ndev->lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ndev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ifa->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tb->tb6_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock 
&net->sctp.local_addr_lock &net->sctp.addr_wq_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&(&idev->mc_report_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.fib6_gc_lock rcu_read_lock &tb->tb6_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.fib6_gc_lock rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.fib6_gc_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 batched_entropy_u8.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 kfence_freelist_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &n->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &n->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock rcu_read_lock lock#8 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock rcu_read_lock id_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 
0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_base_lock &xa->xa_lock#4 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock &list->lock#12 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &in_dev->mc_tomb_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &tbl->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem class irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex 
(inetaddr_chain).rwsem (&tbl->proxy_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ul->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->xdp.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex mirred_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &nft_net->commit_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex proc_subdir_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ent->pde_unload_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_report_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->pndevs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnn->routes.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pnettable->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex smc_ib_devices.mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex target_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_NONE irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem 
&obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &k->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#20 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex subsys mutex#20 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &x->wait#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dpm_list_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->power.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex deferred_probe_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex device_links_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock xps_map_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock 
pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#83 fs_reclaim irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock kfence_freelist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_node_0 irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_node_0 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_base_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem lweventlist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem napi_hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
&dir->lock#2 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem netdev_unregistering_wq.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock tk_core.seq.seqcount irq_context: 0 rtnl_mutex subsys mutex#20 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_TUNNEL6#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_TUNNEL6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &p->pi_lock &rq->__lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 rcu_read_lock &rq->__lock irq_context: 0 tomoyo_ss remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock sb_writers#5 &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 _xmit_ETHER &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->tx_global_lock _xmit_SIT#2 irq_context: 0 tomoyo_ss rcu_node_0 irq_context: 0 tomoyo_ss &rcu_state.expedited_wq irq_context: 0 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock irq_context: 0 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_SIT irq_context: 0 rtnl_mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock key irq_context: 0 cb_lock genl_mutex rfkill_global_mutex lock 
kernfs_idr_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock pcpu_lock irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock percpu_counters_lock irq_context: 0 &tsk->futex_exit_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 
&f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle bit_wait_table + i irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fq->mq_flush_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex.wait_lock irq_context: 0 &mdev->req_queue_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1178 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1178 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1181 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1184 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1184 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy161 irq_context: 0 &ret->b_state_lock &journal->j_list_lock rcu_read_lock &xa->xa_lock#9 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &dev->mutex rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &____s->seqcount irq_context: 0 rtnl_mutex 
gdp_mutex &____s->seqcount#2 irq_context: 0 rtnl_mutex gdp_mutex &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock &n->list_lock &c->lock irq_context: softirq (&peer->timer_persistent_keepalive) &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#27 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#27 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock pcpu_lock stock_lock irq_context: 0 &dir->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex key#8 irq_context: 0 rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock pool_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock rcu_node_0 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rq->__lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ul->lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#272 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_TUNNEL irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock kfence_freelist_lock irq_context: softirq rcu_callback &x->wait#24 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 
&of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock subsys mutex#76 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &k->k_lock klist_remove_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex device_links_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &k->list_lock irq_context: 0 &dentry->d_lock &sb->s_type->i_lock_key#23 &lru->node[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rq->__lock cpu_asid_lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &rq->__lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx fs_reclaim &cfs_rq->removed.lock irq_context: 0 misc_mtx fs_reclaim &obj_hash[i].lock irq_context: 0 misc_mtx fs_reclaim pool_lock#2 irq_context: 0 &dev->mutex remove_cache_srcu irq_context: 0 &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 &dev->mutex remove_cache_srcu &c->lock irq_context: 0 &dev->mutex remove_cache_srcu &n->list_lock irq_context: 0 &dev->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &dev->mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &dev->mutex remove_cache_srcu &pcp->lock 
&zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &meta->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 cb_lock remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#26 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu pcpu_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rq->__lock cpu_asid_lock irq_context: 0 misc_mtx remove_cache_srcu &rq->__lock irq_context: 0 misc_mtx remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex rcu_node_0 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_QIPCRTR irq_context: 0 sk_lock-AF_QIPCRTR slock-AF_QIPCRTR irq_context: 0 slock-AF_QIPCRTR irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal percpu_counters_lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 sb_internal pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1197 irq_context: 0 major_names_lock &c->lock irq_context: 0 major_names_lock &obj_hash[i].lock irq_context: 0 major_names_lock loop_ctl_mutex irq_context: 0 major_names_lock &pcp->lock &zone->lock irq_context: 0 major_names_lock &____s->seqcount irq_context: 0 sb_writers &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) &obj_hash[i].lock pool_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &____s->seqcount#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 &q->sysfs_dir_lock kernfs_idr_lock irq_context: 0 &type->i_mutex_dir_key/1 &fsnotify_mark_srcu &conn->lock irq_context: 0 &type->i_mutex_dir_key/1 &conn->lock irq_context: 0 &type->i_mutex_dir_key/1 &group->mark_mutex irq_context: 0 &type->i_mutex_dir_key/1 &group->mark_mutex &mark->lock irq_context: 0 &type->i_mutex_dir_key/1 &mark->lock irq_context: 0 &type->i_mutex_dir_key/1 fs_reclaim irq_context: 0 &type->i_mutex_dir_key/1 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key/1 &group->notification_lock irq_context: 0 &type->i_mutex_dir_key/1 &group->notification_waitq irq_context: 0 &type->i_mutex_dir_key/1 &group->notification_waitq &ep->lock irq_context: 0 &type->i_mutex_dir_key/1 &group->inotify_data.idr_lock irq_context: 0 &type->i_mutex_dir_key/1 &group->inotify_data.idr_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 &group->inotify_data.idr_lock pool_lock#2 irq_context: 0 &type->i_mutex_dir_key/1 destroy_lock irq_context: 0 &type->i_mutex_dir_key/1 rcu_read_lock &pool->lock irq_context: 0 &type->i_mutex_dir_key/1 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &type->i_mutex_dir_key/1 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key/1 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key/1 &base->lock irq_context: 0 &type->i_mutex_dir_key/1 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers &sb->s_type->i_mutex_key#4 tomoyo_ss &____s->seqcount#2 irq_context: 0 &type->i_mutex_dir_key/1 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 &q->debugfs_mutex &obj_hash[i].lock pool_lock irq_context: 0 &type->i_mutex_dir_key/1 &group->notification_waitq &ep->lock &ep->wq irq_context: 0 &type->i_mutex_dir_key/1 &group->notification_waitq &ep->lock &ep->wq &p->pi_lock irq_context: 0 &type->i_mutex_dir_key/1 &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key/1 &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events 
(work_completion)(&blkg->free_work) &q->blkcg_mutex &rq->__lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 uevent_sock_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: softirq rcu_callback pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1197 &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rcu_state.gp_wq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback rcu_read_lock &q->mq_freeze_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#513 irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#513 &rq->__lock irq_context: 0 crtc_ww_class_mutex &rq->__lock irq_context: 0 crtc_ww_class_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#513 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&conn->disc_work)->work) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 rtnl_mutex dev_addr_sem &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#131 irq_context: 0 misc_mtx per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 misc_mtx stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#513 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &p->lock batched_entropy_u8.lock irq_context: 0 &f->f_pos_lock &p->lock kfence_freelist_lock irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock fs_reclaim irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &sem->wait_lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#7 irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) &q->blkcg_mutex pool_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle rcu_node_0 irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx net_rwsem &____s->seqcount irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#530 irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); 
(typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock tk_core.seq.seqcount irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &wq &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &dentry->d_lock &wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET &rq->__lock irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#118 irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#118 irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#4 irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock fs_reclaim irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#196 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) pool_lock#2 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#575 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#575 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#575 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq net/core/link_watch.c:31 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock batched_entropy_u8.lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock quarantine_lock irq_context: 0 (wq_completion)hci2#8 
(work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->id_addr_timer)->work) irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &x->wait#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1376 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1259 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1542 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1259 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1256 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1256 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#557 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#553 irq_context: 0 &mm->mmap_lock fs_reclaim key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock fs_reclaim pcpu_lock irq_context: 0 &mm->mmap_lock fs_reclaim percpu_counters_lock irq_context: 0 rtnl_mutex &ul->lock#2 &n->list_lock irq_context: 0 rtnl_mutex &ul->lock#2 &n->list_lock &c->lock irq_context: 0 &data->open_mutex gdp_mutex &rq->__lock irq_context: 0 &data->open_mutex gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#26 irq_context: 0 namespace_sem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1197 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1280 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1280 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1281 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#574 &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &base->lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#5 irq_context: 0 
(wq_completion)hci3#5 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 pcpu_alloc_mutex stock_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 misc_mtx rfkill_global_mutex &n->list_lock irq_context: 0 misc_mtx rfkill_global_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&dom->period_timer) &p->sequence key#13 irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 sb_internal kfence_freelist_lock irq_context: 0 sb_writers#3 sb_internal &meta->lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) rcu_node_0 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_IPGRE &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_IPGRE pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_IPGRE krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_mutex.wait_lock irq_context: 0 rtnl_mutex stock_lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)hci0#12 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire &lock->wait_lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire &p->pi_lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.barrier_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) &list->lock#5 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &br->hash_lock quarantine_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &list->lock#7 irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 
cb_lock genl_mutex rfkill_global_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->sync_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem hwsim_radio_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem hwsim_radio_lock rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem pin_fs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 pin_fs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &xa->xa_lock#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &s->s_inode_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock pool_lock#2 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (inetaddr_chain).rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem inet6addr_chain.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (debug_obj_work).work kfence_freelist_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1147 &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start stock_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&cnet->ecache.dwork)->work) irq_context: 0 cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.waiters.lock rcu_read_lock &p->pi_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock &p->pi_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem key irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem percpu_counters_lock irq_context: 0 pcpu_alloc_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &wdev->pmsr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->wiphy_work_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem pcpu_lock stock_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &c->lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem.waiters.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &fq->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx pin_fs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pin_fs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &xa->xa_lock#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 
pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &fsnotify_mark_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_lock_key#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &s->s_inode_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &xa->xa_lock#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock &dentry->d_lock/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx mount_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx mount_lock mount_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &local->active_txq_lock[i] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (work_completion)(&sta->drv_deliver_wk) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ifibss->incomplete_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx lweventlist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx hrtimer_bases.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &data->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &local->queue_stop_reason_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &fq->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &local->queue_stop_reason_lock irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &x->wait#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_crypto#8 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&wdev->disconnect_wk) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&wdev->pmsr_free_wk) irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx _xmit_ETHER &local->filter_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (&local->dynamic_ps_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (&dwork->timer)#3 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (&dwork->timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &____s->seqcount irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &list->lock#16 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &wq->mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &wq->mutex &pool->lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &wq->mutex &x->wait#10 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->dev_wait irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &local->iflist_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_base_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_base_lock &xa->xa_lock#4 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx cpu_hotplug_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx cpu_hotplug_lock &list->lock#12 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx bpf_devs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx net_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx net_rwsem &list->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysctl_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &tbl->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx class irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (&tbl->proxy_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ul->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &net->xdp.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx mirred_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &nft_net->commit_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &tn->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &tb->tb6_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx proc_subdir_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ent->pde_unload_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx proc_inum_ida.xa_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &net->ipv6.addrconf_hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ndev->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ndev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_query_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_query_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (work_completion)(&(&idev->mc_report_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_report_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &pnn->pndevs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &pnn->routes.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &pnettable->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx smc_ib_devices.mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx target_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem tcp_metrics_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-clock-AF_INET 
irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex bus_type_sem &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (work_completion)(&(&link->color_collision_detect_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx sysfs_symlink_target_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &k->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &dev->power.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &x->wait#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dpm_list_mtx irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &dev->power.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx deferred_probe_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx device_links_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &k->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx cpu_hotplug_lock xps_map_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->mgmt_registrations_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &wdev->event_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &fq->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &fq->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &fq->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &fq->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &fq->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (&dwork->timer)#4 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) rcu_node_0 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1147 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex.wait_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &p->pi_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 remove_cache_srcu fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &wb->list_lock &sb->s_type->i_lock_key#24 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#469 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&local->restart_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &list->lock#16 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rdev->wiphy.mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rfkill->uevent_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rfkill->sync_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: softirq rcu_callback key#23 irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem &k->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysfs_symlink_target_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem subsys mutex#40 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem subsys mutex#40 &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem subsys mutex#40 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &x->wait#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dpm_list_mtx irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->power.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem deferred_probe_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem device_links_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rfkill->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem triggers_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem leds_list_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#8 &c->lock irq_context: 0 sb_writers#8 &n->list_lock 
irq_context: 0 sb_writers#8 &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1148 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &xa->xa_lock#19 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex devlinks.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex klist_remove_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &k->k_lock klist_remove_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &x->wait#9 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dpm_list_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock deferred_probe_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock device_links_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &n->list_lock &c->lock 
irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock sysfs_symlink_target_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx subsys mutex#54 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->wiphy_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->conn_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->event_work) irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem (work_completion)(&(&rdev->dfs_update_channels_wk)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&rdev->background_cac_done_wk)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->destroy_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->propagate_radar_detect_wk) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->propagate_cac_done_wk) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->mgmt_registrations_update_wk) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&rdev->background_cac_abort_wk) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &dev->power.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex device_links_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex nl_table_wait.lock irq_context: 
0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem subsys mutex#53 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem subsys mutex#53 &k->k_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem subsys mutex#53 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem gdp_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&local->sta_cleanup) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock crngs.lock irq_context: 0 pernet_ops_rwsem nf_hook_mutex &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &dev->mutex (work_completion)(&rfkill->uevent_work) &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)bond0#76 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 
rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 batched_entropy_u8.lock crngs.lock irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock &nsim_dev->fa_cookie_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1148 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#616 irq_context: 0 &mm->mmap_lock sb_writers#3 stock_lock irq_context: 0 &mm->mmap_lock sb_writers#3 key irq_context: 0 &mm->mmap_lock sb_writers#3 pcpu_lock irq_context: 0 &mm->mmap_lock sb_writers#3 percpu_counters_lock irq_context: 0 &mm->mmap_lock sb_writers#3 pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#616 &rq->__lock irq_context: 0 &xt[i].mutex remove_cache_srcu &base->lock irq_context: 0 &xt[i].mutex remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#981 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: softirq (&peer->timer_persistent_keepalive) &pcp->lock &zone->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) 
&devlink->lock_key#76 &nsim_trap_data->trap_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#197 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock batched_entropy_u8.lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: softirq rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &p->lock remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &p->lock remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &p->lock remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#616 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1339 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#12 &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#138 irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_highpri (work_completion)(flush) irq_context: 0 (wq_completion)events_highpri 
(work_completion)(flush) &list->lock#12 irq_context: 0 (wq_completion)events_highpri (work_completion)(&barr->work) irq_context: 0 (wq_completion)events_highpri (work_completion)(&barr->work) &x->wait#10 irq_context: 0 (wq_completion)events_highpri (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)events_highpri (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_highpri (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pcpu_alloc_mutex key irq_context: 0 pcpu_alloc_mutex percpu_counters_lock irq_context: 0 pcpu_alloc_mutex pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &rcu_state.expedited_wq irq_context: 0 rtnl_mutex k-sk_lock-AF_INET &rq->__lock irq_context: 0 rtnl_mutex k-sk_lock-AF_INET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_state.barrier_mutex stock_lock irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 &f->f_pos_lock &p->lock rcu_read_lock rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1148 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ul->lock#2 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ul->lock#2 &dir->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &base->lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#354 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex quarantine_lock irq_context: 0 
&ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1148 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1148 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1148 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1148 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1149 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1149 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1149 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#354 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1149 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1150 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1150 &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem percpu_counters_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1150 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1150 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1150 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1150 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &x->wait#3 irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) 
&rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex gdp_mutex &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1150 irq_context: 0 &f->f_pos_lock &p->lock rcu_read_lock &rq->__lock irq_context: 0 &f->f_pos_lock &p->lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_base_lock &xa->xa_lock#4 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#926 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#926 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#349 irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#347 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1293 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1294 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#584 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 batched_entropy_u8.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#583 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#7 irq_context: 0 pernet_ops_rwsem 
k-sk_lock-AF_RXRPC &rxnet->local_mutex key irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#7 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock &c->lock irq_context: 0 (wq_completion)nfc36_nci_rx_wq#4 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock cpu_asid_lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc36_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#953 irq_context: 0 rcu_state.barrier_mutex key irq_context: 0 rcu_state.barrier_mutex pcpu_lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 rcu_state.barrier_mutex percpu_counters_lock irq_context: 0 rcu_state.barrier_mutex pcpu_lock stock_lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &____s->seqcount#2 irq_context: 0 rtnl_mutex &br->lock lweventlist_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &br->lock lweventlist_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) pool_lock#2 irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1269 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#12 &rq->__lock irq_context: 0 kn->active#50 &n->list_lock irq_context: 0 kn->active#50 &n->list_lock &c->lock irq_context: 0 kn->active#51 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 kn->active#51 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 nf_nat_proto_mutex &n->list_lock irq_context: 0 nf_nat_proto_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &n->list_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &lock->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &lock->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &p->pi_lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &u->iolock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &lock->wait_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &lock->wait_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rq->__lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET k-slock-AF_INET#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET k-clock-AF_INET irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_INET#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_INET#2 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-slock-AF_INET#2 pool_lock#2 irq_context: 0 &dev->master_mutex irq_context: 0 &dev->master_mutex fs_reclaim irq_context: 0 &dev->master_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->master_mutex &c->lock irq_context: 0 &dev->master_mutex pool_lock#2 irq_context: 0 &dev->master_mutex &file->master_lookup_lock irq_context: 0 &dev->master_mutex &obj_hash[i].lock irq_context: 0 &dev->master_mutex &rq->__lock irq_context: 0 &dev->master_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex &rq->__lock cpu_asid_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock 
rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#5 irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &n->list_lock irq_context: 0 &dev->mutex remove_cache_srcu &rq->__lock irq_context: 0 &dev->mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dir->lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET slock-AF_PHONET irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_node_0 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#413 &rq->__lock irq_context: 0 sk_lock-AF_PACKET &rnp->exp_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex triggers_list_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex triggers_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#413 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1299 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#13 irq_context: 0 __ip_vs_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &base->lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1294 &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex quarantine_lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#376 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#376 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#11 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &base->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 sb_writers#3 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#985 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#13 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: softirq (&sk->sk_timer) irq_context: softirq (&sk->sk_timer) slock-AF_INET#2 irq_context: 0 rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key &c->lock irq_context: softirq (&peer->timer_send_keepalive) &____s->seqcount#2 irq_context: softirq (&peer->timer_send_keepalive) &____s->seqcount irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#972 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#386 irq_context: 0 sb_writers#7 &of->mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_node_0 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sn->gssp_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &cd->hash_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem cache_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem cache_list_lock &cd->hash_lock irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem (&net->can.stattimer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem vmap_area_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem purge_vmap_area_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem purge_vmap_area_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem xfrm_state_gc_work irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->xfrm.xfrm_state_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &hashinfo->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-clock-AF_INET6 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock sysctl_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &sb->s_type->i_lock_key#23 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_lock_key#23 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_lock_key#23 &dentry->d_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rename_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock &sb->s_type->i_lock_key#23 &lru->node[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock &lru->node[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&net->ipv6.addr_chk_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ip6_fl_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->rules_mod_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&net->ipv6.ip6_fib_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (&mrt->ipmr_expire_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (&ipvs->dest_trash_timer) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&ipvs->expire_nodest_conn_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&ipvs->defense_work)->work) irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem ipvs->est_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex pcpu_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&(&ipvs->est_reload_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nfnl_subsys_ipset irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem recent_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem hashlimit_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem trans_gc_work irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_log_mutex irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#379 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#7 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_node_0 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &nf_conntrack_locks[i] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &____s->seqcount#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex rcu_read_lock &nf_nat_locks[i] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sn->pipefs_sb_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)inet_frag_wq 
(work_completion)(&fqdir->destroy_work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)inet_frag_wq (work_completion)(&fqdir->destroy_work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ht->mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&net->xfrm.policy_hash_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->xfrm.xfrm_policy_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (work_completion)(&net->xfrm.state_hash_work) irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &xa->xa_lock#4 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem genl_sk_destructing_waitq.lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex rcu_state.barrier_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex.wait_lock irq_context: 0 (wq_completion)events fqdir_free_work &p->pi_lock irq_context: 0 (wq_completion)events fqdir_free_work &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events fqdir_free_work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock krc.lock &base->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work 
rcu_state.barrier_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work krc.lock irq_context: 0 (wq_completion)netns net_cleanup_work &dir->lock irq_context: 0 (wq_completion)netns net_cleanup_work &dir->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work &dir->lock pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex bus_type_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &meta->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &cfs_rq->removed.lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_node_0 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1294 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#972 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#972 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fsnotify_mark_srcu &group->notification_waitq &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#149 irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &fsnotify_mark_srcu fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu quarantine_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc3_nci_rx_wq#553 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock xps_map_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 &dev->mutex (work_completion)(&rfkill->sync_work) &rq->__lock irq_context: 0 &dev->mutex (work_completion)(&rfkill->sync_work) &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1542 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 &dev->mutex uevent_sock_mutex.wait_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex &batadv_netdev_addr_lock_key/1 &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (debug_obj_work).work &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &obj_hash[i].lock irq_context: softirq &(&gc_work->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&gc_work->dwork)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem (&timer.timer) irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem fw_lock &x->wait#22 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem kernfs_idr_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem subsys mutex#73 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem deferred_probe_mutex irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem device_links_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem 
uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex sysfs_symlink_target_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) umhelper_sem gdp_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) fw_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex reg_indoor_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex krc.lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex reg_requests_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex reg_pending_beacons_lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex fs_reclaim irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&fw_work->work) rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex remove_cache_srcu irq_context: 0 (wq_completion)events reg_work rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events reg_work rtnl_mutex &c->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex irq_context: 0 (wq_completion)events reg_work rtnl_mutex 
uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1543 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1261 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#86 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#86 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex _xmit_ETHER &n->list_lock irq_context: 0 rtnl_mutex _xmit_ETHER &n->list_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem batched_entropy_u8.lock irq_context: 0 rtnl_mutex dev_addr_sem kfence_freelist_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 nfc_devlist_mutex subsys mutex#39 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex 
fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 sb_writers#7 kn->active#4 batched_entropy_u8.lock irq_context: 0 sb_writers#7 kn->active#4 kfence_freelist_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1261 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#80 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sbinfo->stat_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1261 irq_context: 0 cb_lock genl_mutex rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tt.last_changeset_lock &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1070 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &n->list_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1070 &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rq->__lock &cfs_rq->removed.lock irq_context: 0 nf_hook_mutex &n->list_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_node_0 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1070 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1262 irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 nl_table_lock nl_table_wait.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex &rq->__lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem __ip_vs_app_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#51 &____s->seqcount#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#11 irq_context: 0 &mm->mmap_lock sb_writers#3 &n->list_lock irq_context: 0 &mm->mmap_lock sb_writers#3 &n->list_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss batched_entropy_u8.lock irq_context: 0 wq_pool_mutex wq_pool_mutex.wait_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &base->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex key irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex pcpu_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex percpu_counters_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &pcp->lock &zone->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1262 irq_context: 0 &mm->mmap_lock lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &n->lock 
irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock pool_lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &n->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &n->lock &____s->seqcount#8 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock nl_table_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock nl_table_wait.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock lock#8 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock rcu_read_lock id_table_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &dir->lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock krc.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &obj_hash[i].lock pool_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1262 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#562 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#563 irq_context: 0 br_ioctl_mutex rtnl_mutex stock_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#189 irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#564 irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start key irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#564 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1265 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#192 irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_state_lock 
&journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &mm->mmap_lock mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#187 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#565 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &rq->__lock irq_context: 0 &kernfs_locks->open_file_mutex[count] rcu_node_0 irq_context: 0 (wq_completion)events_power_efficient &rq->__lock irq_context: 0 br_ioctl_mutex rtnl_mutex &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock init_task.mems_allowed_seq.seqcount irq_context: 0 &dev->mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex kfence_freelist_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 br_ioctl_mutex rtnl_mutex key irq_context: 0 br_ioctl_mutex rtnl_mutex pcpu_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &root->kernfs_rwsem &meta->lock irq_context: 0 &dev->mutex &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &mapping->i_private_lock irq_context: 0 br_ioctl_mutex rtnl_mutex percpu_counters_lock irq_context: 0 br_ioctl_mutex rtnl_mutex pcpu_lock stock_lock irq_context: 0 br_ioctl_mutex rtnl_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#565 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#559 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#557 irq_context: 0 &dev->mutex &root->kernfs_rwsem key irq_context: 0 &dev->mutex &root->kernfs_rwsem pcpu_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem percpu_counters_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &____s->seqcount irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work krc.lock &base->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount#2 irq_context: 0 pcpu_alloc_mutex free_vmap_area_lock &obj_hash[i].lock irq_context: 0 &fsnotify_mark_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem key irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 &fsnotify_mark_srcu fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1267 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#566 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1269 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#193 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#193 &rq->__lock irq_context: 0 br_ioctl_mutex rtnl_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock key#6 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &base->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rq->__lock cpu_asid_lock irq_context: 0 br_ioctl_mutex rtnl_mutex &____s->seqcount irq_context: 0 br_ioctl_mutex rtnl_mutex pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#193 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#193 irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &n->list_lock &c->lock irq_context: 0 uevent_sock_mutex &____s->seqcount#2 irq_context: 0 sb_writers#3 stock_lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#190 irq_context: 0 misc_mtx remove_cache_srcu &base->lock 
irq_context: 0 misc_mtx remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock kfence_freelist_lock irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.gp_wq irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.gp_wq &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1269 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1270 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1270 irq_context: 0 devpts_mutex irq_context: 0 devpts_mutex &xa->xa_lock#21 irq_context: 0 tty_mutex stock_lock irq_context: 0 tty_mutex &tty->legacy_mutex fs_reclaim irq_context: 0 tty_mutex &tty->legacy_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 tty_mutex &tty->legacy_mutex pool_lock#2 irq_context: 0 tty_mutex &tty->legacy_mutex stock_lock irq_context: 0 tty_mutex &tty->legacy_mutex tty_ldiscs_lock irq_context: 0 tty_mutex &tty->legacy_mutex &obj_hash[i].lock irq_context: 0 tty_mutex &tty->legacy_mutex &obj_hash[i].lock pool_lock irq_context: 0 tty_mutex &tty->legacy_mutex &k->list_lock irq_context: 0 tty_mutex &tty->legacy_mutex &k->k_lock irq_context: 0 tty_mutex &tty->legacy_mutex &rq->__lock irq_context: 0 tty_mutex &tty->legacy_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tty_mutex &tty->legacy_mutex &cfs_rq->removed.lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &pcp->lock &zone->lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &n->list_lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &n->list_lock &c->lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &o_tty->termios_rwsem/1 irq_context: 0 &tty->legacy_mutex fs_reclaim irq_context: 0 &tty->legacy_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &tty->legacy_mutex &c->lock irq_context: 0 &tty->legacy_mutex &xa->xa_lock#5 irq_context: 0 &tty->legacy_mutex &xa->xa_lock#5 pool_lock#2 irq_context: 0 &tty->legacy_mutex stock_lock irq_context: 0 &tty->legacy_mutex mmu_notifier_invalidate_range_start irq_context: 0 &tty->legacy_mutex &sb->s_type->i_lock_key#25 irq_context: 0 &tty->legacy_mutex &s->s_inode_list_lock irq_context: 0 &tty->legacy_mutex tk_core.seq.seqcount irq_context: 0 &tty->legacy_mutex &dentry->d_lock irq_context: 0 &tty->legacy_mutex &sb->s_type->i_lock_key#25 &dentry->d_lock irq_context: 0 &tty->legacy_mutex/1 irq_context: 0 &tty->legacy_mutex/1 tasklist_lock irq_context: 0 &tty->legacy_mutex 
&tty->read_wait irq_context: 0 &tty->legacy_mutex &tty->write_wait irq_context: 0 &tty->legacy_mutex &tty->ctrl.lock irq_context: 0 &tty->legacy_mutex devpts_mutex irq_context: 0 &tty->legacy_mutex devpts_mutex &dentry->d_lock irq_context: 0 &tty->legacy_mutex devpts_mutex rcu_read_lock &dentry->d_lock irq_context: 0 &tty->legacy_mutex devpts_mutex &fsnotify_mark_srcu irq_context: 0 &tty->legacy_mutex devpts_mutex &sb->s_type->i_lock_key#25 irq_context: 0 &tty->legacy_mutex devpts_mutex &s->s_inode_list_lock irq_context: 0 &tty->legacy_mutex devpts_mutex &xa->xa_lock#9 irq_context: 0 &tty->legacy_mutex devpts_mutex &obj_hash[i].lock irq_context: 0 &tty->legacy_mutex devpts_mutex pool_lock#2 irq_context: 0 &tty->legacy_mutex devpts_mutex &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &tty->legacy_mutex devpts_mutex &dentry->d_lock &obj_hash[i].lock irq_context: 0 &tty->legacy_mutex devpts_mutex &dentry->d_lock pool_lock#2 irq_context: 0 &tty->legacy_mutex devpts_mutex &rq->__lock irq_context: 0 &tty->legacy_mutex devpts_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tty->legacy_mutex redirect_lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->files_lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 tasklist_lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &o_tty->termios_rwsem/1 irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &buf->lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->write_wait irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->read_wait irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &o_tty->termios_rwsem/1 irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &o_tty->termios_rwsem/1 vmap_area_lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &o_tty->termios_rwsem/1 &obj_hash[i].lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &o_tty->termios_rwsem/1 purge_vmap_area_lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &o_tty->termios_rwsem/1 purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &o_tty->termios_rwsem/1 purge_vmap_area_lock pool_lock#2 irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &o_tty->termios_rwsem/1 pool_lock#2 irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem tty_ldiscs_lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &obj_hash[i].lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem pool_lock#2 irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ctrl.lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &obj_hash[i].lock irq_context: 0 &tty->ldisc_sem irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &tty->termios_rwsem irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &tty->termios_rwsem vmap_area_lock irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &tty->termios_rwsem &obj_hash[i].lock irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &tty->termios_rwsem purge_vmap_area_lock irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &tty->termios_rwsem purge_vmap_area_lock &obj_hash[i].lock irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &tty->termios_rwsem purge_vmap_area_lock pool_lock#2 
irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &tty->termios_rwsem pool_lock#2 irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 tty_ldiscs_lock irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &obj_hash[i].lock irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 pool_lock#2 irq_context: 0 &tty->ldisc_sem/1 irq_context: 0 (work_completion)(&tty->SAK_work) irq_context: 0 (work_completion)(&tty->hangup_work) irq_context: 0 (work_completion)(&tty->hangup_work) &rq->__lock irq_context: 0 (work_completion)(&tty->hangup_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tty_mutex &xa->xa_lock#21 irq_context: 0 tty_mutex (work_completion)(&buf->work) irq_context: 0 tty_mutex rcu_read_lock &pool->lock irq_context: 0 tty_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 tty_mutex rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 tty_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 tty_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem lock kernfs_idr_lock &c->lock irq_context: 0 pernet_ops_rwsem lock kernfs_idr_lock &n->list_lock irq_context: 0 pernet_ops_rwsem lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &meta->lock irq_context: 0 sb_writers#4 oom_adj_mutex oom_adj_mutex.wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 oom_adj_mutex.wait_lock irq_context: 0 sb_writers#4 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &n->list_lock &c->lock irq_context: 0 &ndev->req_lock &rq->__lock irq_context: 0 &ndev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET &pnsocks.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET resource_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->endpoint_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->endpoint_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dev_addr_sem &meta->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1270 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim &rq->__lock irq_context: 0 sb_writers#3 sb_internal mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 sb_internal mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 rtnl_mutex &macsec_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#6 stock_lock irq_context: 0 sb_writers#6 pcpu_lock stock_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &n->list_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#568 irq_context: softirq rcu_callback &x->wait#24 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)gid-cache-wq 
(work_completion)(&work->work) devices_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#568 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->xdp.lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->xdp.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem quarantine_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#560 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1273 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1273 irq_context: 0 misc_mtx fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1274 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1275 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1275 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock &meta->lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1380 irq_context: 0 rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1276 irq_context: 0 nfc_devlist_mutex kn->active#4 irq_context: 0 nfc_devlist_mutex kn->active#4 &root->deactivate_waitq irq_context: 0 nfc_devlist_mutex kn->active#4 &rq->__lock irq_context: 0 nfc_devlist_mutex kn->active#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &of->mutex &root->deactivate_waitq irq_context: 0 &p->lock &of->mutex &root->deactivate_waitq &p->pi_lock irq_context: 0 &p->lock &of->mutex &root->deactivate_waitq &p->pi_lock &rq->__lock irq_context: 0 &p->lock &of->mutex &root->deactivate_waitq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock 
cpu_asid_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (linkwatch_work).work &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1276 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#571 irq_context: 0 cb_lock genl_mutex rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex &rq->__lock irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 l2tp_ip6_lock irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#571 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 
(wq_completion)nfc4_nci_tx_wq#189 irq_context: 0 (wq_completion)wg-kex-wg2#139 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 sb_writers#5 &fsnotify_mark_srcu &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#138 
(work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &dev->mutex triggers_list_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#189 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1278 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1150 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1151 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#572 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#572 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#566 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1279 &rq->__lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1294 irq_context: 0 sb_writers#3 pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &pcp->lock &zone->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1294 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1295 irq_context: 0 misc_mtx (wq_completion)nfc2_nci_cmd_wq#1295 irq_context: 0 
&ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#587 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#333 irq_context: 0 &hdev->req_lock &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#11 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex remove_cache_srcu irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 kn->active#15 &____s->seqcount#2 irq_context: 0 kn->active#15 &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock (worker)->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 &fsnotify_mark_srcu rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) 
&peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock quarantine_lock irq_context: 0 rtnl_mutex _xmit_ETHER/1 &n->list_lock irq_context: 0 rtnl_mutex _xmit_ETHER/1 &n->list_lock &c->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex netpoll_srcu &rq->__lock irq_context: 0 &child->perf_event_mutex &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#115 irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock &base->lock irq_context: 0 &mm->mmap_lock sb_writers#3 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#436 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &rcu_state.expedited_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#478 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#478 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#472 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#470 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#151 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#151 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#151 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#151 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#148 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#146 irq_context: 0 
(wq_completion)nfc3_nci_cmd_wq#479 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1151 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1151 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#474 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1155 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1155 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1155 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1155 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1155 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1155 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1156 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1156 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1156 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#483 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#483 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#483 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#483 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#477 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#475 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#152 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#152 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1156 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1156 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1156 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1156 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#149 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1156 irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 percpu_counters_lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1299 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 pcpu_lock stock_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 batched_entropy_u8.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 kfence_freelist_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_sysfs_mtx.wait_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx.wait_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 &pl->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_TUNNEL6#2 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#588 irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1156 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock 
&c->lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rq->__lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_node_0 irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rnp->exp_wq[1] irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex.wait_lock irq_context: 0 sk_lock-AF_PACKET &p->pi_lock irq_context: 0 sk_lock-AF_PACKET &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_PACKET &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 &rnp->exp_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &rnp->exp_wq[2] irq_context: 0 cb_lock genl_mutex rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 
rcu_read_lock &xa->xa_lock#9 &pl->lock key#12 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &rq->__lock irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex nfc_devlist_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem nl_table_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &xt[i].mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem nl_table_wait.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1156 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1157 irq_context: 0 sb_writers#3 &fsnotify_mark_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &fsnotify_mark_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &fsnotify_mark_srcu pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1157 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1157 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#147 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1157 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#1157 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1157 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1157 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1157 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1157 &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex remove_cache_srcu irq_context: 0 cb_lock genl_mutex rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET &x->wait#3 irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_PHONET irq_context: 0 &sb->s_type->i_mutex_key#10 clock-AF_PHONET irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &lock->wait_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &n->list_lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1157 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1158 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock stock_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 key irq_context: 0 sb_writers &type->i_mutex_dir_key#2 pcpu_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 percpu_counters_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock key irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock pcpu_lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock kfence_freelist_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock percpu_counters_lock irq_context: 0 rtnl_mutex dev_addr_sem &br->lock &br->hash_lock quarantine_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 pcpu_lock stock_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &cfs_rq->removed.lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &____s->seqcount#2 irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &pcp->lock &zone->lock irq_context: 0 rtnl_mutex cpu_hotplug_lock 
wq_pool_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 __ip_vs_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rnp->exp_wq[2] irq_context: 0 rtnl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_hotplug_mutex &rq->__lock irq_context: 0 rtnl_mutex dev_hotplug_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET key irq_context: 0 sb_writers#7 &of->mutex kn->active#4 &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ 
unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: softirq (&peer->timer_send_keepalive) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &n->list_lock &c->lock irq_context: softirq (&peer->timer_send_keepalive) kfence_freelist_lock irq_context: 0 &xt[i].mutex fs_reclaim rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1158 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#484 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#484 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#478 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#476 irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1158 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1158 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1158 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1158 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &meta->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->sysfs_lock &n->list_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rnp->exp_wq[2] irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_node_0 irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.waiters.lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->sysfs_lock &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.waiters.lock rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &cfs_rq->removed.lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 crngs.lock base_crng.lock irq_context: 0 &mm->mmap_lock batched_entropy_u8.lock crngs.lock irq_context: 0 misc_mtx 
wq_pool_mutex.wait_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &obj_hash[i].lock irq_context: softirq net/wireless/reg.c:533 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq net/wireless/reg.c:533 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 kn->active#51 &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &bridge_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 rtnl_mutex &bridge_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &dev_addr_list_lock_key/1 rcu_read_lock _xmit_ETHER &c->lock irq_context: 0 ebt_mutex &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu 
pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_base_lock &xa->xa_lock#4 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nf_hook_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 loop_ctl_mutex &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &____s->seqcount#2 irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &n->list_lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 &n->list_lock &c->lock irq_context: 0 loop_ctl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &____s->seqcount irq_context: 0 misc_mtx nfc_devlist_mutex dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &obj_hash[i].lock irq_context: 0 &q->sysfs_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &xa->xa_lock#5 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &xa->xa_lock#5 pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 stock_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 stock_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu &____s->seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events reg_work rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events reg_work &p->pi_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock 
&q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 uevent_sock_mutex fs_reclaim irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &____s->seqcount irq_context: 0 cb_lock genl_mutex &sem->wait_lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->wiphy_work_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#3 stock_lock irq_context: 0 rtnl_mutex &idev->mc_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &x->wait#10 irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &lock->wait_lock irq_context: 0 key#24 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &n->list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx batched_entropy_u32.lock crngs.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock _xmit_ETHER &c->lock irq_context: 0 kn->active#4 &lock->wait_lock irq_context: 0 kn->active#4 &p->pi_lock irq_context: 0 kn->active#4 &p->pi_lock &rq->__lock irq_context: 0 kn->active#4 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#2 namespace_sem &c->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) 
&hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 sk_lock-AF_PHONET port_mutex#2 irq_context: 0 sk_lock-AF_PHONET port_mutex#2 &rq->__lock irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) &meta->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) kfence_freelist_lock irq_context: 0 sk_lock-AF_PHONET port_mutex#2 local_port_range_lock.seqcount irq_context: 0 sk_lock-AF_PHONET port_mutex#2 &pnsocks.lock irq_context: 0 misc_mtx nfc_devlist_mutex nfc_devlist_mutex.wait_lock irq_context: 0 sk_lock-AF_PHONET fs_reclaim irq_context: 0 sk_lock-AF_PHONET fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sk_lock-AF_PHONET &____s->seqcount irq_context: 0 sk_lock-AF_PHONET pool_lock#2 irq_context: 0 sk_lock-AF_PHONET &rq->__lock irq_context: 0 sk_lock-AF_PHONET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_PHONET &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 sk_lock-AF_PHONET &c->lock irq_context: 0 &root->kernfs_rwsem sched_map-wait-type-override &rq->__lock irq_context: 0 &root->kernfs_rwsem sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 sb_writers#3 sb_internal jbd2_handle irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock &base->lock irq_context: 0 rtnl_mutex _xmit_ETHER &local->filter_lock &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx 
lweventlist_lock pool_lock#2 irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 sb_writers#3 sb_internal &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex batched_entropy_u8.lock irq_context: 0 misc_mtx nfc_devlist_mutex kfence_freelist_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#14 &n->list_lock irq_context: 0 kn->active#14 &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem pcpu_lock stock_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 key irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &c->lock irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 pcpu_lock irq_context: 0 &type->i_mutex_dir_key#5 fs_reclaim &rq->__lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock 
&rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 rtnl_mutex &rcu_state.expedited_wq irq_context: 0 rtnl_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 percpu_counters_lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_lock_key#25 irq_context: softirq (&sk->sk_timer) slock-AF_INET#2 tk_core.seq.seqcount irq_context: softirq (&sk->sk_timer) slock-AF_INET#2 &obj_hash[i].lock irq_context: softirq (&sk->sk_timer) slock-AF_INET#2 &base->lock irq_context: softirq (&sk->sk_timer) slock-AF_INET#2 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 sb_writers#7 &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#47 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &type->s_umount_key#47 fill_pool_map-wait-type-override &c->lock irq_context: 0 &type->s_umount_key#47 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events reg_work rtnl_mutex &n->list_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex key irq_context: 0 rtnl_mutex rcu_state.exp_mutex pcpu_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex percpu_counters_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &____s->seqcount#2 irq_context: 0 rtnl_mutex gdp_mutex &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xa->xa_lock#5 &c->lock irq_context: 0 &xa->xa_lock#5 &n->list_lock irq_context: 0 &xa->xa_lock#5 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock 
&rq->__lock cpu_asid_lock irq_context: 0 tty_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 pidmap_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh noop_qdisc.q.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem rcu_node_0 irq_context: 0 misc_mtx fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rcu_state.expedited_wq irq_context: 0 misc_mtx fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#131 
(work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#131 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: softirq &(&bat_priv->bla.work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1158 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1158 &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &sem->wait_lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#129 irq_context: 0 (wq_completion)wg-kex-wg1#129 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#129 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_node_0 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1158 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &sem->wait_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem key irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem pcpu_lock irq_context: 0 kn->active#4 &cfs_rq->removed.lock irq_context: 0 kn->active#4 &obj_hash[i].lock irq_context: 0 sb_writers#5 
&type->i_mutex_dir_key#5/1 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &p->pi_lock irq_context: 0 sb_writers#3 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &mapping->i_private_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock &base->lock irq_context: 0 tomoyo_ss rcu_read_lock &obj_hash[i].lock irq_context: 0 tomoyo_ss rcu_read_lock key irq_context: 0 tomoyo_ss rcu_read_lock pcpu_lock irq_context: 0 tomoyo_ss rcu_read_lock percpu_counters_lock irq_context: 0 tomoyo_ss rcu_read_lock pool_lock#2 irq_context: 0 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1159 irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex rcu_read_lock stock_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#485 irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock crngs.lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &n->list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &n->list_lock &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) 
sched_map-wait-type-override &rq->__lock cpu_asid_lock irq_context: 0 &xt[i].mutex batched_entropy_u8.lock crngs.lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire remove_cache_srcu irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire remove_cache_srcu quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#485 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &dev->master_mutex &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#485 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#485 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#479 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#477 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 &dev->master_mutex stock_lock irq_context: 0 sb_writers#3 &base->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &fsnotify_mark_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1297 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1159 irq_context: softirq (&peer->timer_retransmit_handshake) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &obj_hash[i].lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1159 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1159 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1160 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1160 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss kfence_freelist_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &meta->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1160 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &____s->seqcount#2 irq_context: 0 sb_writers#3 sb_internal batched_entropy_u8.lock crngs.lock irq_context: 0 &sb->s_type->i_mutex_key#10 pool_lock irq_context: 
softirq (&p->forward_delay_timer) &br->lock rcu_read_lock &n->list_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1160 irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &cfs_rq->removed.lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#140 irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.gp_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 &group->mark_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &group->mark_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &sb->s_type->i_mutex_key#13/4 &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 
(wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex fs_reclaim stock_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &____s->seqcount#2 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) batched_entropy_u8.lock crngs.lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1160 &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1160 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 &mm->mmap_lock &base->lock irq_context: 0 &mm->mmap_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock sb_writers#3 &base->lock irq_context: 0 &mm->mmap_lock sb_writers#3 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock key irq_context: 0 
sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1160 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1160 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1160 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1160 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex fs_reclaim key irq_context: 0 rtnl_mutex fs_reclaim pcpu_lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &____s->seqcount#2 irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &____s->seqcount irq_context: 0 &dev->master_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex fs_reclaim &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work krc.lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) batched_entropy_u8.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 pcpu_alloc_mutex free_vmap_area_lock pool_lock#2 irq_context: 0 
sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem pcpu_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem percpu_counters_lock irq_context: 0 remove_cache_srcu &base->lock irq_context: 0 remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 &dev->master_mutex &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex console_owner_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex console_owner irq_context: 0 __ip_vs_mutex console_owner_lock irq_context: 0 pcpu_alloc_mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 remove_cache_srcu stock_lock irq_context: 0 &p->lock &of->mutex kn->active#4 &base->lock irq_context: 0 &p->lock &of->mutex kn->active#4 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) rcu_node_0 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#3 remove_cache_srcu key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &n->list_lock irq_context: 0 __ip_vs_mutex console_owner irq_context: 0 &dev->master_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER krc.lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 tty_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tty_mutex &rq->__lock irq_context: 0 tty_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 (work_completion)(&buf->work) irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 &tty->files_lock irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 stock_lock irq_context: 0 &dev->master_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 &dev->master_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1161 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#486 irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &journal->j_state_lock &journal->j_wait_transaction_locked &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 free_vmap_area_lock &pcp->lock &zone->lock irq_context: 0 free_vmap_area_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 
&dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &n->list_lock irq_context: 0 &type->s_umount_key#47 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &type->s_umount_key#47 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#486 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#480 irq_context: softirq mm/memcontrol.c:679 rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#480 &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock percpu_counters_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#480 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &rq->__lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&sdp->work) pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) tasklist_lock &base->lock &obj_hash[i].lock irq_context: 0 &dev_instance->mutex &rq->__lock irq_context: 0 &dev_instance->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &sem->wait_lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = 
(unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 &vma->vm_lock->lock pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &sem->wait_lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#5 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex pool_lock#2 irq_context: 0 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) sched_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &n->list_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock stock_lock irq_context: 0 rtnl_mutex rcu_read_lock pcpu_lock stock_lock irq_context: 0 
(wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rnp->exp_wq[3] irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock irq_context: 0 &u->iolock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 &mm->mmap_lock batched_entropy_u8.lock crngs.lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_NETROM quarantine_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) &n->list_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem bit_wait_table + i irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const 
void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock &c->lock irq_context: 0 uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount#2 irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &pcp->lock &zone->lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)nfc3_nci_tx_wq#478 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#478 &rq->__lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#478 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 &f->f_pos_lock &p->lock &rq->__lock irq_context: 0 &f->f_pos_lock &p->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock 
&table->lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 &vma->vm_lock->lock fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &sem->wait_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &____s->seqcount#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start stock_lock irq_context: 0 pernet_ops_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &rq->__lock irq_context: 0 sb_writers#7 
&of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &type->i_mutex_dir_key#3 stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock pool_lock#2 irq_context: 0 &p->lock &of->mutex kn->active#13 &rq->__lock irq_context: softirq (&in_dev->mr_ifc_timer) batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#3 sb_internal &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 &sig->cred_guard_mutex &meta->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->master_mutex remove_cache_srcu irq_context: 0 &dev->master_mutex remove_cache_srcu quarantine_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &meta->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 kfence_freelist_lock irq_context: 0 &dev->master_mutex 
remove_cache_srcu &c->lock irq_context: 0 &dev->master_mutex remove_cache_srcu &n->list_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &lock->wait_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &lock->wait_lock &p->pi_lock irq_context: 0 crtc_ww_class_acquire &lock->wait_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem batched_entropy_u8.lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET6/1 &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET6/1 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET6/1 &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ep->mtx sched_map-wait-type-override &rq->__lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1040 &rq->__lock irq_context: 0 rtnl_mutex fs_reclaim percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock key irq_context: 0 
sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ifa->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1161 irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &base->lock &obj_hash[i].lock irq_context: 0 &u->iolock &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 &u->iolock &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &u->iolock &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 &lruvec->lru_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq#2 &p->pi_lock irq_context: 0 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock fill_pool_map-wait-type-override kfence_freelist_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 
(wq_completion)events drain_vmap_work vmap_purge_lock free_vmap_area_lock &base->lock &obj_hash[i].lock irq_context: 0 tomoyo_ss rcu_read_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem key irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem pcpu_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem percpu_counters_lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 tomoyo_ss rcu_read_lock pcpu_lock stock_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &____s->seqcount#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &sbi->s_orphan_lock &lock->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 sk_lock-AF_INET 
rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock batched_entropy_u32.lock irq_context: 0 rtnl_mutex fs_reclaim pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1299 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1299 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1299 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1300 irq_context: 0 sk_lock-AF_INET crngs.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &c->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &n->list_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1300 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1300 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1300 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex stack_depot_init_mutex irq_context: 0 cb_lock genl_mutex rtnl_mutex krc.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &dir->lock#2 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#333 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#328 irq_context: 0 sk_lock-AF_INET rcu_read_lock &n->list_lock &c->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rhashtable_bucket irq_context: 0 sk_lock-AF_INET &asoc->wait irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock 
&p->pi_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 &sighand->siglock &pcp->lock &zone->lock irq_context: 0 &sighand->siglock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 rcu_read_lock &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_node_0 irq_context: 0 sb_writers#3 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)hci1 irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 nfc_devlist_mutex kn->active#4 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &____s->seqcount irq_context: 0 pernet_ops_rwsem uevent_sock_mutex remove_cache_srcu irq_context: 0 (wq_completion)events_unbound connector_reaper_work rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock &rq->__lock 
&cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &type->s_umount_key#49 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &____s->seqcount irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &base->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 &xt[i].mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &sb->s_type->i_mutex_key#9 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#4 irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc34_nci_rx_wq#4 irq_context: softirq &(&tbl->gc_work)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &sem->wait_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex leds_list_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex leds_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 uts_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock quarantine_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem stock_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem pcpu_lock stock_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock stock_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 uts_sem key irq_context: 0 uts_sem pcpu_lock irq_context: 0 &type->i_mutex_dir_key#5 key irq_context: 0 tomoyo_ss rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock pool_lock#2 irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock 
pool_lock#2 irq_context: 0 &ep->mtx &obj_hash[i].lock pool_lock irq_context: 0 sb_internal remove_cache_srcu irq_context: 0 &data->open_mutex uevent_sock_mutex &n->list_lock irq_context: 0 uts_sem percpu_counters_lock irq_context: 0 uts_sem pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_INET &meta->lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev_instance->mutex remove_cache_srcu &base->lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle bit_wait_table + i irq_context: 0 &bgl->locks[i].lock irq_context: 0 kn->active#47 &n->list_lock irq_context: 0 kn->active#47 &n->list_lock &c->lock irq_context: 0 &dev_instance->mutex remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex quarantine_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rcu_state.expedited_wq irq_context: 0 &xt[i].mutex purge_vmap_area_lock kfence_freelist_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1306 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#327 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1161 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#93 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#93 &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#93 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#132 irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &bridge_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 &mm->mmap_lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 stock_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem stock_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem key irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem pcpu_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem percpu_counters_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem pcpu_lock stock_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &lock->wait_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem pool_lock#2 irq_context: 0 &vma->vm_lock->lock mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock stock_lock irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock &____s->seqcount#2 irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock &____s->seqcount irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock &c->lock irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock clock-AF_INET irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock 
stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &dccp_hashinfo.bhash[i].lock stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &dccp_hashinfo.bhash[i].lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &dccp_hashinfo.bhash[i].lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem pcpu_lock irq_context: 0 rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock remove_cache_srcu irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal mmu_notifier_invalidate_range_start 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 l2tp_ip_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock console_owner_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 tasklist_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_state_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (work_completion)(&ht->run_work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex lweventlist_lock kfence_freelist_lock irq_context: 0 nfc_devlist_mutex gdp_mutex &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 nfc_devlist_mutex gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem pcpu_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem percpu_counters_lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock quarantine_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &____s->seqcount irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#51 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) 
*)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 sb_writers#3 &rq->__lock cpu_asid_lock irq_context: softirq rcu_read_lock_bh rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 
(wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu pool_lock#2 irq_context: 0 rtnl_mutex sk_lock-AF_INET slock-AF_INET#2 irq_context: 0 rtnl_mutex sk_lock-AF_INET &mm->mmap_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET rcu_node_0 irq_context: 0 rtnl_mutex sk_lock-AF_INET &rq->__lock irq_context: 0 &dev->mutex &meta->lock irq_context: 0 misc_mtx nfc_devlist_mutex bus_type_sem &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex bus_type_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex sk_lock-AF_INET rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex slock-AF_INET#2 irq_context: 0 misc_mtx sched_map-wait-type-override &rq->__lock irq_context: 0 misc_mtx sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex sk_lock-AF_INET fs_reclaim irq_context: 0 rtnl_mutex sk_lock-AF_INET fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex sk_lock-AF_INET &c->lock irq_context: 0 &dev_instance->mutex &pcp->lock &zone->lock irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex &rq->__lock irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &sem->wait_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET &n->list_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET &n->list_lock &c->lock irq_context: 0 rtnl_mutex sk_lock-AF_INET pool_lock#2 irq_context: 0 rtnl_mutex sk_lock-AF_INET &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss pcpu_lock stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 rcu_read_lock rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 rcu_read_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex sk_lock-AF_INET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET key 
irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 &pipe->mutex/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET percpu_counters_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex sk_lock-AF_INET rcu_read_lock &dir->lock#2 irq_context: 0 rtnl_mutex sk_lock-AF_INET rcu_read_lock &ul->lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock pool_lock#2 irq_context: 0 rtnl_mutex sk_lock-AF_INET &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 __ip_vs_mutex irq_context: 0 __ip_vs_mutex (console_sem).lock irq_context: 0 __ip_vs_mutex console_lock console_srcu console_owner_lock irq_context: 0 __ip_vs_mutex console_lock console_srcu console_owner irq_context: 0 __ip_vs_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 __ip_vs_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 __ip_vs_mutex &rq->__lock irq_context: 0 __ip_vs_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &p->alloc_lock &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#200 irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1301 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock pool_lock#2 irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify 
= (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock.wait_lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 &mm->mmap_lock stock_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem pcpu_lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#130 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle 
&ei->xattr_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1301 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &ssp->srcu_sup->srcu_gp_mutex &ssp->srcu_sup->srcu_cb_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &base->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback rcu_node_0 irq_context: 0 &type->i_mutex_dir_key/1 quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx wq_mayday_lock irq_context: softirq rcu_read_lock &br->hash_lock &____s->seqcount#2 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock rcu_node_0 irq_context: 0 
&sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.waiters.lock irq_context: 0 cb_lock remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex fs_reclaim &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem cgroup_threadgroup_rwsem.waiters.lock rcu_read_lock &p->pi_lock irq_context: 0 nl_table_wait.lock &p->pi_lock irq_context: 0 nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nf_nat_proto_mutex &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_node_0 irq_context: 0 purge_vmap_area_lock quarantine_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &rq->__lock irq_context: 0 rcu_read_lock &sighand->siglock &n->list_lock irq_context: 0 rcu_read_lock &sighand->siglock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &fsnotify_mark_srcu percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &xt[i].mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex remove_cache_srcu irq_context: 0 misc_mtx &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 misc_mtx &dev->mutex remove_cache_srcu &c->lock irq_context: 0 misc_mtx &dev->mutex remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx &dev->mutex 
remove_cache_srcu &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex remove_cache_srcu pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex subsys mutex#40 &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex subsys mutex#40 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex rcu_read_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock &obj_hash[i].lock irq_context: 0 nf_nat_proto_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)nfc34_nci_tx_wq#4 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &base->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &base->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 batched_entropy_u8.lock crngs.lock irq_context: 0 __ip_vs_mutex &cfs_rq->removed.lock irq_context: 0 __ip_vs_mutex &obj_hash[i].lock irq_context: 0 __ip_vs_mutex pool_lock#2 irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &disk->open_mutex inode_hash_lock &sb->s_type->i_lock_key#3 irq_context: 0 &bdev->bd_holder_lock irq_context: 0 &q->mq_freeze_lock percpu_ref_switch_lock &obj_hash[i].lock irq_context: 0 &q->mq_freeze_lock percpu_ref_switch_lock pool_lock#2 irq_context: 0 &q->mq_freeze_wq irq_context: 0 (&bdi->laptop_mode_wb_timer) irq_context: 0 &wb->work_lock irq_context: 0 (work_completion)(&(&wb->dwork)->work) irq_context: 0 rcu_read_lock (wq_completion)writeback irq_context: 0 (wq_completion)writeback 
(work_completion)(&barr->work) irq_context: 0 (wq_completion)writeback (work_completion)(&barr->work) &x->wait#10 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 &xt[i].mutex remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &xt[i].mutex remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &lock->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc8_nci_tx_wq#7 irq_context: 0 tty_mutex &tty->legacy_mutex &c->lock irq_context: 0 nfc_devlist_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss batched_entropy_u8.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss kfence_freelist_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &meta->lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &rq->__lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &tty->termios_rwsem &rq->__lock irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &tty->termios_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex &rq->__lock irq_context: 0 tty_mutex &tty->legacy_mutex &n->list_lock irq_context: 0 tty_mutex &tty->legacy_mutex &n->list_lock &c->lock irq_context: 0 &tty->legacy_mutex &rq->__lock irq_context: 0 &tty->legacy_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tty_mutex &obj_hash[i].lock pool_lock irq_context: 0 tty_mutex &tty->legacy_mutex &rq->__lock cpu_asid_lock irq_context: 0 tty_mutex tty_mutex.wait_lock irq_context: 0 &tty->legacy_mutex tty_mutex.wait_lock irq_context: 0 &tty->legacy_mutex &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &____s->seqcount#2 irq_context: 0 &tty->legacy_mutex &p->pi_lock &rq->__lock irq_context: 0 &tty->legacy_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 tty_mutex batched_entropy_u8.lock irq_context: 0 tty_mutex kfence_freelist_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock key irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock pcpu_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock percpu_counters_lock irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 &meta->lock irq_context: 0 kn->active#4 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 kfence_freelist_lock irq_context: 0 &tty->legacy_mutex rcu_read_lock rcu_node_0 irq_context: 0 &tty->legacy_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex &c->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx pool_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 &tty->legacy_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock irq_context: 0 tty_mutex &n->list_lock irq_context: 0 tty_mutex &n->list_lock &c->lock irq_context: 0 tty_mutex &tty->legacy_mutex batched_entropy_u8.lock irq_context: 0 tty_mutex &tty->legacy_mutex kfence_freelist_lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem free_vmap_area_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#956 irq_context: 0 pernet_ops_rwsem &sem->wait_lock irq_context: 0 &xt[i].mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &xt[i].mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &xt[i].mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &n->list_lock irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem free_vmap_area_lock pool_lock#2 irq_context: 0 &tty->legacy_mutex devpts_mutex rcu_read_lock rcu_node_0 irq_context: 0 &dev->mutex uevent_sock_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 &dev->mutex uevent_sock_mutex 
remove_cache_srcu quarantine_lock irq_context: 0 &tty->legacy_mutex devpts_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tty->legacy_mutex devpts_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &hwstats->hwsdev_list_lock &rq->__lock irq_context: 0 &vcapture->lock &rq->__lock irq_context: 0 &vcapture->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#956 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &tty->legacy_mutex devpts_mutex rcu_node_0 irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] &____s->seqcount irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock remove_cache_srcu percpu_counters_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu pcpu_lock stock_lock irq_context: 0 sb_writers#4 oom_adj_mutex &rcu_state.expedited_wq irq_context: 0 sb_writers#4 oom_adj_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &cfs_rq->removed.lock irq_context: 0 sb_writers#4 oom_adj_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock pool_lock#2 irq_context: 0 sb_writers#4 oom_adj_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &wg->static_identity.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#45 &rq->__lock irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &tty->termios_rwsem rcu_read_lock rcu_node_0 irq_context: 0 &root->kernfs_rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &root->kernfs_rwsem &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 &type->i_mutex_dir_key#2 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 
&mdev->req_queue_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#956 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#958 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#378 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#378 irq_context: 0 &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#110 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#115 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->hash_lock &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#387 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu pool_lock#2 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev_instance->mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#388 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &base->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1161 irq_context: 0 sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_QIPCRTR &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &tsk->futex_exit_mutex &obj_hash[i].lock irq_context: 0 &tsk->futex_exit_mutex pool_lock#2 irq_context: 0 misc_mtx fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 misc_mtx rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#388 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#112 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#976 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock 
irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&w->w) &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#115 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#976 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#976 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#976 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 rcu_node_0 irq_context: 0 cb_lock genl_mutex rcu_read_lock &n->list_lock &c->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#977 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock pool_lock irq_context: 0 &q->debugfs_mutex &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &vlan_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 rtnl_mutex &vlan_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount irq_context: 0 &hdev->req_lock &hdev->lock batched_entropy_u8.lock irq_context: 0 &hdev->req_lock &hdev->lock kfence_freelist_lock irq_context: 0 &hdev->req_lock &hdev->lock &meta->lock irq_context: 0 &q->debugfs_mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#4 uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &q->debugfs_mutex &rcu_state.expedited_wq irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock 
irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#8 irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#977 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#978 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock &____s->seqcount irq_context: 0 pernet_ops_rwsem &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock &cfs_rq->removed.lock irq_context: 0 &hdev->lock &rq->__lock irq_context: 0 &hdev->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#202 irq_context: 0 vlan_ioctl_mutex rtnl_mutex &cfs_rq->removed.lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex &obj_hash[i].lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex pool_lock#2 irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim &rq->__lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#8 irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_NETLINK &rq->__lock irq_context: 0 sk_lock-AF_NETLINK &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) &rq->__lock irq_context: 0 nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex _xmit_ETHER krc.lock &obj_hash[i].lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = 
(typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 namespace_sem &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 namespace_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex dpm_list_mtx &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#72 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 sb_writers#4 &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nfnl_subsys_ipset &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events reg_work rtnl_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#5 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem 
nf_ct_proto_mutex nf_hook_mutex &rq->__lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock pcpu_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &meta->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 sb_writers#4 remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex quarantine_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock irq_context: softirq (&peer->timer_persistent_keepalive) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = 
(unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &n->list_lock irq_context: 0 slock-AF_INET#2 &sk->sk_lock.wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal 
jbd2_handle rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ret->b_state_lock &journal->j_list_lock &wb->work_lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem bit_wait_table + i irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &rq->__lock irq_context: 0 kn->active#13 remove_cache_srcu irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 
(wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg1#70 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#6 &cfs_rq->removed.lock irq_context: 0 sb_writers#6 &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &____s->seqcount irq_context: 0 syslog_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &dentry->d_lock &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem pool_lock#2 irq_context: 0 rcu_read_lock &dentry->d_lock &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &dentry->d_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &base->lock irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lweventlist_lock &c->lock irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 rcu_read_lock rcu_node_0 irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &pcp->lock &zone->lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_sysfs_mtx.wait_lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx.wait_lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers#5 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &obj_hash[i].lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx key irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx pcpu_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx percpu_counters_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx pool_lock#2 irq_context: 0 &xt[i].mutex pool_lock irq_context: 0 kn->active#4 fs_reclaim &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle stock_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle pcpu_lock stock_lock irq_context: 0 kn->active#4 fs_reclaim &obj_hash[i].lock irq_context: 0 kn->active#4 fs_reclaim pool_lock#2 irq_context: 0 rtnl_mutex gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 sb_writers#5 lock#4 &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) purge_vmap_area_lock &meta->lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) purge_vmap_area_lock kfence_freelist_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events free_ipc_work sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events free_ipc_work sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock remove_cache_srcu quarantine_lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_read_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock remove_cache_srcu &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx &wq->mutex &pool->lock irq_context: 0 misc_mtx &wq->mutex &x->wait#10 irq_context: 0 misc_mtx wq_pool_mutex &wq->mutex &pool->lock irq_context: 0 rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 bt_proto_lock &n->list_lock irq_context: 0 bt_proto_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#407 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#405 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &____s->seqcount#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq irq_context: 0 &f->f_pos_lock 
&type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock rcu_read_lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 sb_writers#3 jbd2_handle &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ret->b_state_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock key irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nl_table_wait.lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)hci1#2 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &list->lock#5 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &list->lock#7 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1034 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1034 irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 drm_unplug_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &fsnotify_mark_srcu &rq->__lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &dev->mode_config.idr_mutex &file->master_lookup_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &mm->mmap_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &fsnotify_mark_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 drm_unplug_srcu &obj_hash[i].lock irq_context: 0 drm_unplug_srcu pool_lock#2 irq_context: 0 drm_unplug_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#7 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ipvlan->addrs_lock &obj_hash[i].lock pool_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock percpu_counters_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &group->avgs_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle fill_pool_map-wait-type-override pool_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &p->lock &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound 
(work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 jbd2_handle &rq->__lock irq_context: 0 &sb->s_type->i_lock_key#23 bit_wait_table + i irq_context: 0 &mm->mmap_lock sb_writers#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock pool_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &br->lock rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex &br->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &br->lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) console_lock 
console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &dentry->d_lock &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &dentry->d_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &dentry->d_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock kfence_freelist_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events drain_vmap_work vmap_purge_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 rtnl_mutex net_rwsem pool_lock#2 irq_context: 0 rtnl_mutex net_rwsem &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex net_rwsem nl_table_lock irq_context: 0 rtnl_mutex net_rwsem nl_table_wait.lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &pcp->lock &zone->lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 &q->debugfs_mutex &rq->__lock irq_context: 0 &q->debugfs_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while 
(0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &mm->mmap_lock rcu_read_lock &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem stock_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) 
rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock kfence_freelist_lock irq_context: 0 fs_reclaim rcu_node_0 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &rq->__lock irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: softirq rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) 
hci_sk_list.lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &type->i_mutex_dir_key/1 &group->inotify_data.idr_lock &obj_hash[i].lock pool_lock irq_context: 0 &type->i_mutex_dir_key/1 &group->inotify_data.idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &type->i_mutex_dir_key/1 &group->inotify_data.idr_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 &type->i_mutex_dir_key/1 &group->inotify_data.idr_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 &group->inotify_data.idr_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &type->i_mutex_dir_key/1 &group->inotify_data.idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &type->i_mutex_dir_key/1 &rq->__lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq#2 &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 &sb->s_type->i_lock_key#23 &dentry->d_lock &wq#2 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pcpu_lock stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 pcpu_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1301 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1302 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu rcu_node_0 irq_context: 0 &root->kernfs_rwsem &rcu_state.expedited_wq irq_context: 0 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 krc.lock &obj_hash[i].lock irq_context: 
0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 krc.lock &base->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &simple_offset_xa_lock &obj_hash[i].lock irq_context: softirq rcu_callback rcu_read_lock &q->mq_freeze_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &n->list_lock irq_context: 0 &q->sysfs_dir_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock crngs.lock irq_context: 0 &type->i_mutex_dir_key/1 rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &n->list_lock &c->lock irq_context: 0 &q->sysfs_dir_lock &sem->wait_lock irq_context: 0 &q->sysfs_dir_lock &p->pi_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 misc_mtx remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 fill_pool_map-wait-type-override stock_lock irq_context: 0 fill_pool_map-wait-type-override key irq_context: 0 fill_pool_map-wait-type-override pcpu_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 fill_pool_map-wait-type-override percpu_counters_lock irq_context: 0 fill_pool_map-wait-type-override pcpu_lock stock_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem pcpu_lock stock_lock irq_context: 0 sb_writers &dentry->d_lock irq_context: 0 sb_writers tomoyo_ss irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &pcp->lock &zone->lock 
irq_context: 0 uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &c->lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override pool_lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &____s->seqcount#2 irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &____s->seqcount irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock kernfs_idr_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss quarantine_lock irq_context: 0 &type->i_mutex_dir_key/1 remove_cache_srcu irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex _xmit_ETHER &local->filter_lock &n->list_lock &c->lock irq_context: 0 kn->active#48 &n->list_lock irq_context: 0 kn->active#48 &n->list_lock &c->lock irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lock#6 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx lock#6 kcov_remote_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock krc.lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &obj_hash[i].lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &hashinfo->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 rcu_read_lock &local->handle_wake_tx_queue_lock hwsim_radio_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key &n->list_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &type->i_mutex_dir_key/1 remove_cache_srcu quarantine_lock irq_context: 0 &type->i_mutex_dir_key/1 remove_cache_srcu &c->lock irq_context: 0 &type->i_mutex_dir_key/1 remove_cache_srcu &n->list_lock irq_context: 0 remove_cache_srcu &meta->lock irq_context: 0 remove_cache_srcu kfence_freelist_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key/1 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem stock_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 misc_mtx fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal &base->lock irq_context: 0 sb_writers#3 sb_internal &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock 
&rcu_state.gp_wq irq_context: 0 cb_lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &sem->wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &rq->__lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &peer->endpoint_lock irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex rcu_read_lock rcu_node_0 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_node_0 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_callback rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &pn->hash_lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem pool_lock#2 irq_context: 0 &dev->mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &dev->mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 pool_lock#2 irq_context: softirq rcu_callback rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: softirq rcu_callback rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &pcp->lock 
&zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock percpu_counters_lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: softirq rcu_callback rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &obj_hash[i].lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock pool_lock#2 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex uevent_sock_mutex stock_lock irq_context: 0 rtnl_mutex uevent_sock_mutex key irq_context: 0 rtnl_mutex uevent_sock_mutex pcpu_lock irq_context: 0 rtnl_mutex uevent_sock_mutex percpu_counters_lock irq_context: 0 rtnl_mutex uevent_sock_mutex pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex gdp_mutex &n->list_lock irq_context: 0 rtnl_mutex gdp_mutex &n->list_lock &c->lock irq_context: 0 sb_writers tomoyo_ss mmu_notifier_invalidate_range_start irq_context: 0 sb_writers tomoyo_ss rcu_read_lock mount_lock.seqcount irq_context: 0 sb_writers tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers &xattrs->lock irq_context: 0 sb_writers tomoyo_ss &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss &____s->seqcount#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss &____s->seqcount irq_context: 0 sb_writers tomoyo_ss &____s->seqcount#2 irq_context: 0 sb_writers tomoyo_ss &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex rcu_state.barrier_lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex rcu_state.barrier_lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &x->wait#24 irq_context: 0 sb_writers tomoyo_ss rcu_read_lock rcu_node_0 irq_context: 0 sb_writers tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 sb_writers tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers tomoyo_ss &n->list_lock irq_context: 0 sb_writers tomoyo_ss &n->list_lock &c->lock irq_context: 0 sb_writers tomoyo_ss &rq->__lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#2 remove_cache_srcu irq_context: 0 &type->i_mutex_dir_key#2 remove_cache_srcu &rq->__lock irq_context: 0 &type->i_mutex_dir_key#2 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#2 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_lock_key#22 bit_wait_table + i irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &p->lock &of->mutex kn->active#4 batched_entropy_u8.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &p->lock &of->mutex kn->active#4 kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) 
&hdev->lock uevent_sock_mutex irq_context: 0 drm_unplug_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&ifa->dad_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &mapping->i_mmap_rwsem &cfs_rq->removed.lock irq_context: 0 &q->sysfs_dir_lock &p->pi_lock &rq->__lock irq_context: 0 &q->sysfs_dir_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &p->lock &of->mutex kn->active#4 &meta->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem percpu_counters_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &q->debugfs_mutex &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &q->debugfs_mutex &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &q->debugfs_mutex &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu quarantine_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu &c->lock irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu &n->list_lock irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &p->lock remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock 
fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu rcu_node_0 irq_context: 0 cb_lock genl_mutex remove_cache_srcu &rq->__lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 rcu_state.exp_mutex.wait_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &rcu_state.expedited_wq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 cb_lock genl_mutex fs_reclaim &rq->__lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem __ip_vs_app_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex key irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 &xa->xa_lock#9 &c->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex pcpu_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key &c->lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &n->list_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss 
mount_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mount_lock rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 misc_mtx fill_pool_map-wait-type-override &rq->__lock irq_context: 0 misc_mtx fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex leds_list_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#5 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 tomoyo_ss remove_cache_srcu rcu_node_0 irq_context: 0 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 tomoyo_ss remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.gp_wq irq_context: 0 rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex.wait_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex 
uevent_sock_mutex.wait_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &n->list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 &xt[i].mutex rcu_read_lock key irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 &xt[i].mutex rcu_read_lock pcpu_lock irq_context: softirq (&ndev->rs_timer) &zone->lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) &zone->lock &____s->seqcount irq_context: 0 &vma->vm_lock->lock &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.gp_wq &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&aux->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &file->fbs_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle pcpu_lock stock_lock irq_context: 0 drm_connector_list_iter &file->master_lookup_lock irq_context: 0 drm_connector_list_iter &mm->mmap_lock irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &ndev->req_lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&rxnet->peer_keepalive_timer) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex irq_context: 0 &p->lock &of->mutex kn->active#4 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 &dev_instance->mutex fs_reclaim &rq->__lock irq_context: 0 &dev_instance->mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers tomoyo_ss &pcp->lock &zone->lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: hardirq &vkms_out->lock &dev->event_lock &x->wait#15 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 
sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers tomoyo_ss rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &br->hash_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex &br->hash_lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex remove_cache_srcu &____s->seqcount irq_context: 0 &hdev->req_lock rcu_read_lock rcu_node_0 irq_context: 0 misc_mtx wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem key irq_context: 0 rtnl_mutex &root->kernfs_rwsem pcpu_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem percpu_counters_lock irq_context: 0 &hdev->req_lock &hdev->lock pool_lock irq_context: 0 &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock stock_lock irq_context: 0 &hdev->req_lock rcu_read_lock &rq->__lock irq_context: 0 &hdev->req_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex sched_map-wait-type-override &pool->lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#7 irq_context: 0 (wq_completion)hci2#9 irq_context: 0 sb_writers#4 oom_adj_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#4 oom_adj_mutex &obj_hash[i].lock irq_context: 0 sb_writers#4 oom_adj_mutex pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock 
irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) rcu_read_lock batched_entropy_u8.lock crngs.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->clientlist_mutex &helper->lock &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire &rq->__lock irq_context: 0 tomoyo_ss rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers fill_pool_map-wait-type-override &c->lock irq_context: 0 &dev->mutex kn->active#4 irq_context: 0 &dev->mutex kn->active#4 &root->deactivate_waitq irq_context: 0 &root->deactivate_waitq irq_context: 0 sb_writers fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers fill_pool_map-wait-type-override pool_lock irq_context: 0 &type->i_mutex_dir_key#4 iattr_mutex 
iattr_mutex.wait_lock irq_context: softirq (&app->join_timer) &app->lock batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &meta->lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &base->lock irq_context: 0 &xt[i].mutex &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &cfs_rq->removed.lock irq_context: softirq rcu_read_lock hwsim_radio_lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long 
__ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev_instance->mutex remove_cache_srcu pool_lock#2 irq_context: 0 &dev_instance->mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev_instance->mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev_instance->mutex remove_cache_srcu &rq->__lock irq_context: 0 &dev_instance->mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex rtnl_mutex.wait_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &dev->master_mutex key irq_context: 0 &dev->master_mutex pcpu_lock irq_context: 0 &dev->master_mutex percpu_counters_lock irq_context: 0 &dev->master_mutex pcpu_lock stock_lock irq_context: 0 &dev->master_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock genl_mutex (console_sem).lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim &cfs_rq->removed.lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim pool_lock#2 
irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &x->wait#10 irq_context: 0 sb_writers#3 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->s_umount_key#46/1 &rq->__lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) &list->lock#5 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &list->lock#7 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#592 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1304 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1306 irq_context: 0 &dev->mutex &root->kernfs_rwsem &base->lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1306 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1307 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1307 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1308 irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 &rq->__lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 sb_writers#4 
&sb->s_type->i_mutex_key#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci5#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci5#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 rcu_read_lock rcu_read_lock &sighand->siglock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&p->forward_delay_timer) &br->lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 &u->iolock &base->lock irq_context: 0 &u->iolock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 namespace_sem pcpu_alloc_mutex &rq->__lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 namespace_sem pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bond0#80 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1198 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1198 irq_context: 0 kn->active#15 &c->lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex &c->lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex &n->list_lock irq_context: 0 (wq_completion)events_power_efficient (crda_timeout).work rtnl_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#6 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock stock_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock rcu_read_lock stock_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#2 
(work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) quarantine_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: softirq (&vblank->disable_timer) &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1251 &rq->__lock irq_context: 0 &xt[i].mutex &rcu_state.expedited_wq irq_context: 0 &xt[i].mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &xt[i].mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &xt[i].mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1251 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#13 &____s->seqcount#2 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#70 irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock 
&pool->lock &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rcu_state.expedited_wq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &table->lock#3 irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &rq->__lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 tasklist_lock &sighand->siglock &(&sig->stats_lock)->lock &____s->seqcount#4 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 slock-AF_INET6 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock quarantine_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1251 irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 sk_lock-AF_INET l2tp_ip_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 kn->active#14 &____s->seqcount#2 irq_context: 0 kn->active#15 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 kn->active#15 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#15 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 
rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock &____s->seqcount#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex pool_lock#2 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &n->list_lock &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex rfkill_global_mutex.wait_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu &c->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock 
chan_list_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &n->list_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex rcu_read_lock &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#9 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx wq_pool_mutex wq_pool_mutex.wait_lock irq_context: 0 misc_mtx wq_pool_mutex &rq->__lock irq_context: 0 misc_mtx wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 wq_pool_mutex.wait_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->tx_work) irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3 &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3 &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#3 &n->list_lock &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->tx_work) &list->lock#5 irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.gp_wq irq_context: 0 
&dev->master_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fs_reclaim &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)hci1#2 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#128 irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem batched_entropy_u8.lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &meta->lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long 
__ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1251 irq_context: 0 &dev->dev_mutex &____s->seqcount#2 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &obj_hash[i].lock pool_lock irq_context: 0 &f->f_pos_lock sb_writers#4 &____s->seqcount#2 irq_context: 0 &f->f_pos_lock sb_writers#4 &____s->seqcount irq_context: 0 &mdev->req_queue_mutex vim2m:1183:(hdl)->_lock quarantine_lock irq_context: 0 &dev->dev_mutex &rq->__lock irq_context: 0 &dev->dev_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex &rq->__lock irq_context: 0 &dev->mutex &vhci_hcd->vhci->lock irq_context: 0 rcu_read_lock &sighand->siglock batched_entropy_u8.lock crngs.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex hcd_root_hub_lock irq_context: 0 &dev->mutex hcd_root_hub_lock device_state_lock irq_context: 0 &dev->mutex usbfs_mutex irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 &dev->mutex usbfs_mutex &rq->__lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &c->lock irq_context: 0 &dev->mutex usbfs_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &hub->status_mutex irq_context: 0 
sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &hub->status_mutex mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex kfence_freelist_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &hub->status_mutex pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (check_lifetime_work).work rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &caifn->caifdevs.lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 &dev->mutex &hub->status_mutex hcd_root_hub_lock irq_context: 0 &dev->mutex &hub->status_mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &dev->mutex &hub->status_mutex fs_reclaim irq_context: softirq rcu_callback batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER krc.lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 
sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &hub->status_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->mutex &hub->status_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle pcpu_lock stock_lock irq_context: 0 &dev->mutex &hub->status_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &hub->status_mutex &vhci_hcd->vhci->lock irq_context: 0 &dev->mutex &hub->status_mutex &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem kfence_freelist_lock irq_context: 0 &dev->mutex &hub->status_mutex hcd_root_hub_lock &bh->lock irq_context: 0 &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &hub->status_mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &hub->status_mutex &x->wait#19 irq_context: 0 &data->open_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem krc.lock &obj_hash[i].lock irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &data->open_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &meta->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock kfence_freelist_lock irq_context: 0 &dev->mutex uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->mutex &hub->status_mutex &base->lock irq_context: 0 &dev->mutex &hub->status_mutex &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex &hub->status_mutex &rq->__lock irq_context: 0 &dev->mutex &hub->status_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq lock#6 &kcov->lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex &hub->status_mutex (&timer.timer) irq_context: 0 &data->open_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex &rdev->wiphy.mtx gdp_mutex &rq->__lock irq_context: 0 &dev->mutex &hub->status_mutex &c->lock irq_context: 0 &dev->mutex &hub->status_mutex &n->list_lock irq_context: 0 &dev->mutex &hub->status_mutex &n->list_lock &c->lock irq_context: 0 &dev->mutex hcd_root_hub_lock hcd_urb_list_lock irq_context: 0 &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->power.lock &dev->power.lock/1 irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &pcp->lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &pcp->lock &zone->lock irq_context: 0 (wq_completion)mm_percpu_wq (work_completion)(&(({ do { const void *__vpp_verify = (typeof((&vmstat_work) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((&vmstat_work))) *)((&vmstat_work))); (typeof((typeof(*((&vmstat_work))) *)((&vmstat_work)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); }))->work) &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex rcu_read_lock key irq_context: 0 rtnl_mutex rcu_read_lock pcpu_lock irq_context: 0 rtnl_mutex rcu_read_lock percpu_counters_lock irq_context: 0 deferred_probe_mutex &rq->__lock irq_context: 0 deferred_probe_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &dev->mutex &dev->power.lock &dev->power.wait_queue irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) lock#6 &kcov->lock irq_context: 0 &dev->mutex &mm->mmap_lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &____s->seqcount#2 irq_context: 0 &dev->mutex &ps->lock irq_context: 0 &dev->mutex &hub->irq_urb_lock irq_context: 0 &dev->mutex (&hub->irq_urb_retry) irq_context: 0 &dev->mutex &base->lock irq_context: 0 &dev->mutex hcd_urb_unlink_lock irq_context: 0 tasklist_lock &sighand->siglock &n->list_lock irq_context: 0 tasklist_lock &sighand->siglock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &wg->device_update_lock fs_reclaim &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex hcd_root_hub_lock &bh->lock irq_context: 0 &dev->mutex hcd_root_hub_lock &p->pi_lock irq_context: 0 &dev->mutex usb_kill_urb_queue.lock irq_context: 0 rtnl_mutex rcu_read_lock 
fill_pool_map-wait-type-override rcu_node_0 irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex (work_completion)(&hub->tt.clear_work) irq_context: 0 &dev->mutex hcd_urb_list_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_node_0 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1252 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1252 &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 iattr_mutex.wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#555 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#555 irq_context: 0 sk_lock-AF_INET quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &ps->lock irq_context: 0 fs_reclaim &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem pool_lock irq_context: 0 &dev->mutex &hub->status_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex hcd_root_hub_lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex hcd_root_hub_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#6 irq_context: 0 (work_completion)(&local->tx_work) &rq->__lock irq_context: 0 (work_completion)(&local->tx_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_tx_wq#6 irq_context: 0 &dev->mutex &hub->status_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &lock->wait_lock irq_context: 0 (wq_completion)usb_hub_wq (work_completion)(&hub->events) &p->pi_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#7 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &meta->lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 (wq_completion)nfc17_nci_tx_wq#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &dev->mutex &hub->status_mutex remove_cache_srcu irq_context: 0 &dev->mutex &hub->status_mutex remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_node_0 irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &n->list_lock irq_context: 0 &dev->mutex &hub->status_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &hub->status_mutex remove_cache_srcu &n->list_lock 
irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock batched_entropy_u8.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock kfence_freelist_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &meta->lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock rcu_node_0 irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock &tbl->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &hub->status_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#5 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &base->lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &hub->status_mutex quarantine_lock irq_context: 0 &dev->mutex &hub->status_mutex remove_cache_srcu &rq->__lock irq_context: 0 &dev->mutex &hub->status_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu quarantine_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &c->lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &n->list_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#5 irq_context: 0 rtnl_mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock 
fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock key irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &root->deactivate_waitq &p->pi_lock irq_context: 0 &root->deactivate_waitq &p->pi_lock &rq->__lock irq_context: 0 &root->deactivate_waitq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &data->open_mutex rfkill_global_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#129 irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &n->list_lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#129 
(work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_state.barrier_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.fib6_gc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#5 irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: softirq (&ndev->rs_timer) kfence_freelist_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->rq_qos_mutex &q->mq_freeze_lock &rq->__lock irq_context: 0 uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); 
}); })->work) &cookie->lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 cb_lock rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 sb_internal remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &fsnotify_mark_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 sb_writers#6 pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) batched_entropy_u8.lock crngs.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#5 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#157 irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1065 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 pcpu_alloc_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 cb_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 cb_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: softirq (&lapb->t1timer) &lapb->lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 cb_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 kn->active#16 &____s->seqcount#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rnp->exp_wq[1] irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 cb_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 &dev->mutex kn->active#4 &rq->__lock irq_context: 0 &dev->mutex kn->active#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &root->deactivate_waitq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->ipv4.ra_mutex &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 pcpu_alloc_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle bit_wait_table + i irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &ret->b_state_lock bit_wait_table + i irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rnp->exp_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex sk_lock-AF_INET rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex sk_lock-AF_INET rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kauditd_wait.lock &p->pi_lock &rq->__lock irq_context: 0 kauditd_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) &meta->lock irq_context: 0 kauditd_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1065 &rq->__lock irq_context: 0 vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &pcp->lock &zone->lock irq_context: 0 vmap_purge_lock free_vmap_area_lock init_mm.page_table_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#157 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#157 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) 
&wg->static_identity.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem kfence_freelist_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex &obj_hash[i].lock pool_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag4_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex netpoll_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
&sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &pcp->lock &zone->lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)events (debug_obj_work).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh crngs.lock irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key/1 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex pcpu_alloc_mutex rcu_node_0 irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#5 irq_context: softirq &(&bat_priv->nc.work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)nfc31_nci_tx_wq#5 irq_context: 0 rtnl_mutex nf_hook_mutex remove_cache_srcu irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &obj_hash[i].lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock 
irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &n->list_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&hsr->announce_timer) rcu_read_lock &pcp->lock &zone->lock irq_context: 0 &type->s_umount_key#46/1 pcpu_alloc_mutex &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev->mutex &rq->__lock irq_context: 0 &type->s_umount_key#46/1 pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &____s->seqcount#2 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &pcp->lock &zone->lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &____s->seqcount irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#7 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rcu_state.expedited_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 &dev->dev_mutex &n->list_lock irq_context: 0 &dev->dev_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound connector_reaper_work &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 
sk_lock-AF_PHONET &n->list_lock irq_context: 0 sk_lock-AF_PHONET &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex &rq->__lock cpu_asid_lock irq_context: 0 &ctx->vb_mutex irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex &obj_hash[i].lock irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex pool_lock#2 irq_context: 0 &dev->master_mutex uevent_sock_mutex irq_context: 0 &dev->master_mutex uevent_sock_mutex fs_reclaim irq_context: 0 &dev->master_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->master_mutex uevent_sock_mutex pool_lock#2 irq_context: 0 &dev->master_mutex uevent_sock_mutex nl_table_lock irq_context: 0 &dev->master_mutex uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 &dev->master_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &dev->master_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex.wait_lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &sb->s_type->i_lock_key#24 bit_wait_table + i irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 
0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 ebt_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#65 irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = 
(unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex stock_lock irq_context: 0 misc_mtx &dev->mutex key irq_context: 0 misc_mtx &dev->mutex pcpu_lock irq_context: 0 misc_mtx &dev->mutex percpu_counters_lock irq_context: 0 misc_mtx &dev->mutex pcpu_lock stock_lock irq_context: 0 misc_mtx &dev->mutex &cfs_rq->removed.lock irq_context: 0 misc_mtx rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock kfence_freelist_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &mm->mmap_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem &dev->mutex remove_cache_srcu irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex gdp_mutex lock kernfs_idr_lock &c->lock irq_context: 0 &dev->mutex rfkill_global_mutex key irq_context: 0 &dev->mutex rfkill_global_mutex pcpu_lock irq_context: 0 &dev->mutex rfkill_global_mutex percpu_counters_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem rtnl_mutex fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events free_ipc_work fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events free_ipc_work fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock batched_entropy_u8.lock crngs.lock base_crng.lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &rq->__lock cpu_asid_lock irq_context: softirq &(&net->ipv6.addr_chk_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq &(&net->ipv6.addr_chk_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#66 irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = 
(unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &sem->wait_lock irq_context: 0 cb_lock genl_mutex stock_lock irq_context: 0 cb_lock genl_mutex key irq_context: 0 cb_lock genl_mutex pcpu_lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex lweventlist_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 kn->active#50 &____s->seqcount#2 irq_context: 0 kn->active#50 &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 fill_pool_map-wait-type-override &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&ovs_net->masks_rebalance)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock quarantine_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 nfc_devlist_mutex mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->hash_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#5 irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tsk->futex_exit_mutex rcu_node_0 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#5 irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)nfc34_nci_rx_wq#5 irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 key#9 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex.wait_lock irq_context: 0 rtnl_mutex &wg->device_update_lock &p->pi_lock irq_context: 0 rtnl_mutex &wg->device_update_lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle bit_wait_table + i irq_context: 0 rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: 0 &sig->cred_guard_mutex &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 
(wq_completion)nfc4_nci_cmd_wq#132 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_node_0 irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nfc3_nci_rx_wq#427 irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 batched_entropy_u8.lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 kfence_freelist_lock irq_context: 0 &dev_instance->mutex vicodec_core:1851:(hdl)->_lock &rq->__lock 
irq_context: 0 &dev_instance->mutex vicodec_core:1851:(hdl)->_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock pool_lock#2 irq_context: 0 &dev_instance->mutex remove_cache_srcu irq_context: 0 &dev_instance->mutex remove_cache_srcu quarantine_lock irq_context: 0 &dev_instance->mutex remove_cache_srcu &c->lock irq_context: 0 &dev_instance->mutex remove_cache_srcu &n->list_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 &mm->mmap_lock &anon_vma->rwsem mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex remove_cache_srcu &n->list_lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 misc_mtx nfc_devlist_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle 
mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.gp_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &dev->mutex uevent_sock_mutex &base->lock irq_context: 0 &dev->mutex uevent_sock_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &journal->j_state_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 key irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 sb_writers#5 
&type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 percpu_counters_lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 &dev->mutex uevent_sock_mutex quarantine_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock pool_lock irq_context: 0 &type->i_mutex_dir_key#5 pcpu_lock irq_context: 0 &type->i_mutex_dir_key#5 percpu_counters_lock irq_context: 0 &ep->mtx rcu_read_lock rcu_node_0 irq_context: 0 &ep->mtx rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 &data->open_mutex lock kernfs_idr_lock &c->lock irq_context: 0 &data->open_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock 
rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 sb_internal remove_cache_srcu quarantine_lock irq_context: 0 sb_internal remove_cache_srcu &c->lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: softirq (&hsr->announce_timer) rcu_read_lock &hsr->seqnr_lock quarantine_lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 &hdev->req_lock &hdev->lock rcu_node_0 irq_context: 0 lweventlist_lock pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 pernet_ops_rwsem rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 nfc_devlist_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)bond0#80 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &x->wait#10 irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock 
&pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sighand->siglock &____s->seqcount#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &p->lock &of->mutex &root->deactivate_waitq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &p->lock &of->mutex &root->deactivate_waitq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &rcu_state.expedited_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &rcu_state.expedited_wq 
&p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx net_rwsem &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg2#129 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 &pipe->mutex/1 pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 wq_pool_attach_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) rcu_node_0 irq_context: 0 sb_writers#3 remove_cache_srcu rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)bond0#80 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 quarantine_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock 
&type->i_mutex_dir_key#3 fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq irq_context: 0 loop_validate_mutex &lo->lo_mutex &rq->__lock irq_context: 0 loop_validate_mutex &lo->lo_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events free_ipc_work &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq 
&p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#5 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 &root->kernfs_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex sysctl_lock krc.lock &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock key irq_context: 0 rcu_read_lock &vma->vm_lock->lock pcpu_lock irq_context: 0 &root->kernfs_rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 &root->kernfs_rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &root->kernfs_rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &root->kernfs_rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &root->kernfs_rwsem fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &root->kernfs_rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &root->kernfs_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)bond0#80 
(work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 &meta->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 kfence_freelist_lock irq_context: 0 (wq_completion)bond0#80 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1283 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1283 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc4_nci_cmd_wq#271 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1283 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1286 irq_context: 0 (wq_completion)bond0#80 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1376 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1376 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1380 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#646 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1286 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1286 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1286 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1286 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1287 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#572 irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
&n->list_lock &c->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 &group->mark_mutex remove_cache_srcu &rq->__lock irq_context: 0 &group->mark_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 pcpu_lock stock_lock irq_context: 0 &dev->master_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 &dev->master_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &dev->master_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &dev->master_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->master_mutex uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1198 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#202 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#199 irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1199 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#83 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#83 &rq->__lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#83 &devlink_port->type_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1199 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1199 &rq->__lock irq_context: 0 rtnl_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock quarantine_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1199 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx quarantine_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rename_lock.seqcount irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_node_0 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim mmu_notifier_invalidate_range_start 
irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1199 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1200 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rfkill_global_mutex rfkill_global_mutex.wait_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#519 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock batched_entropy_u8.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 &simple_offset_xa_lock kfence_freelist_lock irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &n->list_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#519 irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#513 irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &n->list_lock &c->lock irq_context: 0 sk_lock-AF_INET stock_lock irq_context: 0 sk_lock-AF_INET pcpu_lock stock_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem percpu_counters_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx dev_hotplug_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->master_mutex uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 &dev->master_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 tomoyo_ss &n->list_lock irq_context: 0 sb_writers#5 tomoyo_ss &n->list_lock &c->lock irq_context: 0 &dev->master_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 &dev->master_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 (wq_completion)nfc3_nci_tx_wq#511 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock 
&n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock &n->list_lock &c->lock irq_context: 0 namespace_sem fs_reclaim &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock console_owner irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_owner_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx console_owner irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock key irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock pcpu_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock percpu_counters_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 l2tp_ip_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex kfence_freelist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &meta->lock irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex quarantine_lock irq_context: 0 namespace_sem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock crngs.lock irq_context: 0 cb_lock genl_mutex rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock &nsim_dev->fa_cookie_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tn->lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_INET &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&ndev->rs_timer) init_task.mems_allowed_seq.seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &____s->seqcount irq_context: 0 css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &sem->wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex &p->pi_lock 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex &base->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &vma->vm_lock->lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex kfence_freelist_lock irq_context: 0 cb_lock genl_mutex &base->lock &obj_hash[i].lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock &____s->seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 key irq_context: 0 &sb->s_type->i_mutex_key#10 percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#10 pcpu_lock stock_lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock &obj_hash[i].lock pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem rcu_read_lock rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &____s->seqcount#2 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)netns net_cleanup_work &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#114 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#110 irq_context: 0 &p->lock remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tt.commit_lock &bat_priv->tvlv.container_list_lock quarantine_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1161 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1161 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1162 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#487 irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &p->alloc_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &p->alloc_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#487 irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->id_addr_timer)->work) &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->id_addr_timer)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 ebt_mutex fs_reclaim &rq->__lock irq_context: 0 purge_vmap_area_lock &meta->lock irq_context: softirq (&lapb->t1timer) &lapb->lock &list->lock#20 irq_context: softirq &list->lock#20 irq_context: softirq rcu_read_lock x25_neigh_list_lock irq_context: softirq rcu_read_lock x25_list_lock irq_context: softirq rcu_read_lock x25_forward_list_lock irq_context: 0 purge_vmap_area_lock kfence_freelist_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#481 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex percpu_counters_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#479 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock pool_lock irq_context: 0 &group->mark_mutex &cfs_rq->removed.lock irq_context: 0 &group->mark_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock krc.lock &base->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->sysfs_dir_lock &rq->__lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#7 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock rcu_read_lock &pool->lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 rtnl_mutex remove_cache_srcu fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem quarantine_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss mount_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss mount_lock rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1200 irq_context: 0 &dev->mutex kn->active#4 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1200 &rq->__lock irq_context: 0 &dev->mutex (work_completion)(&rfkill->uevent_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1200 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex (&timer.timer) irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex (work_completion)(&vkms_state->composer_work)#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx subsys mutex#20 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work rcu_node_0 irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) irq_context: 0 &sighand->siglock quarantine_lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem 
mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: 0 misc_mtx nfc_devlist_mutex stock_lock irq_context: 0 misc_mtx nfc_devlist_mutex key irq_context: 0 misc_mtx nfc_devlist_mutex pcpu_lock irq_context: 0 misc_mtx nfc_devlist_mutex percpu_counters_lock irq_context: 0 misc_mtx nfc_devlist_mutex pcpu_lock stock_lock irq_context: 0 misc_mtx nfc_devlist_mutex rcu_node_0 irq_context: 0 misc_mtx nfc_devlist_mutex &rcu_state.expedited_wq irq_context: 0 misc_mtx nfc_devlist_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&({ do { const void *__vpp_verify = 
(typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &ret->b_state_lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx cpu_hotplug_lock xps_map_mutex &rq->__lock irq_context: 0 rtnl_mutex slock-AF_INET#2 &sk->sk_lock.wq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) irq_context: 0 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh 
&obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &simple_offset_xa_lock &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &simple_offset_xa_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock &dir->lock#2 irq_context: 0 rtnl_mutex slock-AF_INET#2 &sk->sk_lock.wq &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx cpu_hotplug_lock xps_map_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_read_lock &obj_hash[i].lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock irq_context: 0 rtnl_mutex _xmit_ETHER krc.lock &base->lock irq_context: 0 rtnl_mutex _xmit_ETHER krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &data->open_mutex 
uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const 
void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 &dev->clientlist_mutex &helper->lock put_task_map-wait-type-override#3 &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &ei->xattr_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock put_task_map-wait-type-override#3 pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 drm_connector_list_iter &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock pcpu_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &n->list_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 pernet_ops_rwsem pcpu_alloc_mutex.wait_lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock percpu_counters_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem quarantine_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_node_0 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex &rq->__lock irq_context: 0 rtnl_mutex _xmit_IPGRE &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &dev_addr_list_lock_key#2/1 _xmit_ETHER krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &vlan_netdev_addr_lock_key/1 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&ovs_net->masks_rebalance)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 &f->f_pos_lock 
&type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &____s->seqcount irq_context: 0 pernet_ops_rwsem &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock &peer->endpoint_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 &xt[i].mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock stock_lock irq_context: 0 rtnl_mutex &tn->lock &rq->__lock irq_context: 0 rtnl_mutex &tn->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem percpu_counters_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: softirq (&ndev->rs_timer) &ndev->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq (&ndev->rs_timer) &ndev->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq (&ndev->rs_timer) &ndev->lock fill_pool_map-wait-type-override &n->list_lock 
irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) remove_cache_srcu irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) remove_cache_srcu &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&hdev->cmd_work) remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex &rq->__lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override rcu_node_0 irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &rcu_state.expedited_wq irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &c->lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &n->list_lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu &obj_hash[i].lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex slock-AF_INET#2 &sk->sk_lock.wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex slock-AF_INET#2 &sk->sk_lock.wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex _xmit_IPGRE &n->list_lock &c->lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &nft_net->commit_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &nft_net->commit_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 
(work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &rq->__lock irq_context: 0 &mm->mmap_lock &c->lock batched_entropy_u8.lock irq_context: 0 &mm->mmap_lock &c->lock kfence_freelist_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem &meta->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem kfence_freelist_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 &q->debugfs_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#4 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 &sb->s_type->i_mutex_key#10 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci1#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci1#4 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1602 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &dev->mutex deferred_probe_mutex &rq->__lock irq_context: 0 &dev->mutex deferred_probe_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock 
&type->i_mutex_dir_key#3 &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &mm->mmap_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem rcu_node_0 irq_context: 0 &p->lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 rtnl_mutex devnet_rename_sem console_owner_lock irq_context: 0 rtnl_mutex devnet_rename_sem console_owner irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1602 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1602 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem quarantine_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6/1 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6/1 
slock-AF_INET6 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6/1 rlock-AF_INET6 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6/1 &list->lock#17 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET6/1 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET6/1 &sctp_ep_hashtable[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET6/1 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET6/1 pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET6/1 clock-AF_INET6 irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1602 &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#786 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#786 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 nf_sockopt_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)writeback (work_completion)(&barr->work) &x->wait#10 &p->pi_lock irq_context: 0 (wq_completion)writeback (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)writeback (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (work_completion)(&(&wb->bw_dwork)->work) irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 &bdi->cgwb_release_mutex irq_context: 0 &bdi->cgwb_release_mutex cgwb_lock irq_context: 0 &root->kernfs_rwsem &pcp->lock &zone->lock irq_context: 0 subsys mutex#37 &k->k_lock klist_remove_lock irq_context: 0 &q->sysfs_dir_lock sysfs_symlink_target_lock irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 &q->sysfs_dir_lock &root->kernfs_rwsem pool_lock#2 irq_context: 0 &q->debugfs_mutex pin_fs_lock irq_context: 0 &q->debugfs_mutex &dentry->d_lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 &q->debugfs_mutex 
&sb->s_type->i_mutex_key#3 &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pin_fs_lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mount_lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 mount_lock mount_lock.seqcount irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &xa->xa_lock#9 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex subsys mutex#40 &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex subsys mutex#40 &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex subsys mutex#40 pool_lock#2 irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 pool_lock#2 irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock pool_lock#2 irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &obj_hash[i].lock pool_lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &obj_hash[i].lock pool_lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->debugfs_mutex rcu_read_lock &dentry->d_lock irq_context: 0 &q->debugfs_mutex &fsnotify_mark_srcu irq_context: 0 &q->debugfs_mutex &sb->s_type->i_lock_key#7 irq_context: 0 &q->debugfs_mutex &s->s_inode_list_lock irq_context: 0 &q->debugfs_mutex &xa->xa_lock#9 irq_context: 0 &q->debugfs_mutex &obj_hash[i].lock irq_context: 0 &q->debugfs_mutex pool_lock#2 irq_context: 0 &q->debugfs_mutex &dentry->d_lock &dentry->d_lock/1 irq_context: 0 &q->debugfs_mutex &dentry->d_lock &obj_hash[i].lock irq_context: 0 &q->debugfs_mutex &dentry->d_lock pool_lock#2 irq_context: 0 &q->debugfs_mutex rcu_read_lock mount_lock irq_context: 0 &q->debugfs_mutex rcu_read_lock mount_lock mount_lock.seqcount irq_context: 0 &q->debugfs_mutex mount_lock irq_context: 0 &q->debugfs_mutex mount_lock mount_lock.seqcount irq_context: 0 &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &n->list_lock irq_context: 0 &type->i_mutex_dir_key/1 tomoyo_ss &n->list_lock &c->lock irq_context: softirq rcu_callback rcu_read_lock &q->mq_freeze_wq irq_context: 0 subsys mutex#36 &k->k_lock klist_remove_lock irq_context: 0 (work_completion)(&q->timeout_work) irq_context: 0 (wq_completion)kintegrityd irq_context: 0 (wq_completion)kintegrityd &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)kintegrityd &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &simple_offset_xa_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 rcu_read_lock &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &sb->s_type->i_mutex_key#4 &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key/1 &fsnotify_mark_srcu irq_context: 0 sb_writers &s->s_inode_list_lock irq_context: 0 sb_writers &obj_hash[i].lock irq_context: 0 sb_writers &sbinfo->stat_lock irq_context: 0 sb_writers &xa->xa_lock#9 irq_context: 0 sb_writers &fsnotify_mark_srcu irq_context: 0 &q->rq_qos_mutex irq_context: 0 &q->rq_qos_mutex &stats->lock irq_context: 0 &q->rq_qos_mutex (&cb->timer) irq_context: 0 &q->rq_qos_mutex &obj_hash[i].lock irq_context: 0 &q->rq_qos_mutex &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_wait_updates &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &journal->j_wait_updates &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &q->rq_qos_mutex pool_lock#2 irq_context: 0 &tags->lock irq_context: 0 &xa->xa_lock#11 irq_context: 0 &q->unused_hctx_lock irq_context: 0 &q->queue_lock &blkcg->lock percpu_ref_switch_lock irq_context: 0 &q->queue_lock &blkcg->lock percpu_ref_switch_lock &obj_hash[i].lock irq_context: 0 &q->queue_lock &blkcg->lock percpu_ref_switch_lock pool_lock#2 irq_context: 0 (&sq->pending_timer) irq_context: 0 (work_completion)(&td->dispatch_work) irq_context: 0 &lo->lo_work_lock irq_context: 0 (&lo->timer) irq_context: softirq rcu_callback rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) &q->blkcg_mutex irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) &q->blkcg_mutex (&sq->pending_timer) irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) &q->blkcg_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) &q->blkcg_mutex &base->lock irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) &q->blkcg_mutex percpu_counters_lock irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) &q->blkcg_mutex pcpu_lock irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) &q->blkcg_mutex pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) &q->blkcg_mutex &q->queue_lock irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) pool_lock#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) &xa->xa_lock#11 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) pcpu_lock irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) blk_queue_ida.xa_lock irq_context: 0 (wq_completion)events (work_completion)(&blkg->free_work) percpu_ref_switch_lock irq_context: softirq 
rcu_callback percpu_ref_switch_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu quarantine_lock irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu &c->lock irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu &n->list_lock irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 &mm->mmap_lock sb_writers#3 remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &p->lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 &n->list_lock irq_context: softirq (&icsk->icsk_delack_timer) slock-AF_INET#2 &n->list_lock &c->lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &____s->seqcount irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 batched_entropy_u8.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 kfence_freelist_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &meta->lock irq_context: 0 &vma->vm_lock->lock &base->lock &obj_hash[i].lock irq_context: 0 &dev->mutex rfkill_global_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock irq_context: 0 &type->s_umount_key#45 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_state.barrier_mutex &cfs_rq->removed.lock irq_context: 0 &data->open_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 &data->open_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#786 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1602 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1600 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1600 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1600 irq_context: 0 nf_nat_proto_mutex nf_hook_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#786 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#776 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#772 irq_context: 0 &dev->mutex rfkill_global_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy136 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) rcu_node_0 irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) &rcu_state.expedited_wq irq_context: 0 
(wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu pool_lock#2 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &____s->seqcount irq_context: 0 nf_sockopt_mutex rcu_node_0 irq_context: 0 nf_sockopt_mutex &rcu_state.expedited_wq irq_context: 0 nf_sockopt_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nf_sockopt_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 nf_sockopt_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 tomoyo_ss &cfs_rq->removed.lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#84 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#84 &devlink_port->type_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy136 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1603 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1603 &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1603 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#84 &nsim_trap_data->trap_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#84 &nsim_trap_data->trap_lock crngs.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#84 &nsim_trap_data->trap_lock &nsim_dev->fa_cookie_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex key irq_context: 0 cb_lock genl_mutex rfkill_global_mutex pcpu_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex percpu_counters_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (reaper_work).work fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex &lock->wait_lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &n->list_lock irq_context: 0 (wq_completion)events wireless_nlevent_work net_rwsem &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 &sfilter->notify_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sfilter->notify_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#84 &nsim_trap_data->trap_lock &obj_hash[i].lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex devnet_rename_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#84 &nsim_trap_data->trap_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#84 &nsim_trap_data->trap_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#84 &nsim_trap_data->trap_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1603 irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu 
rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1601 irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1601 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#13 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss remove_cache_srcu rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#7 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1604 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1604 &rq->__lock irq_context: 0 &mdev->req_queue_mutex &dev->mutex#3 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1604 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &child->perf_event_mutex rcu_node_0 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1604 irq_context: 0 &pipe->mutex/1 &pipe->wr_wait &ep->lock &ep->wq irq_context: 0 &pipe->mutex/1 &pipe->wr_wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 &pipe->mutex/1 &pipe->wr_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 &pipe->mutex/1 &pipe->wr_wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 rcu_node_0 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1602 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1602 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1602 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1602 irq_context: 0 &dev->mutex deferred_probe_mutex deferred_probe_mutex.wait_lock irq_context: 0 (wq_completion)mld 
(work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock pool_lock irq_context: 0 &data->open_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#436 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy135 irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1605 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_INET6 k-slock-AF_INET6 fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1605 rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1605 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_tx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_tx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1605 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock batched_entropy_u8.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1605 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 &sfilter->notify_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &n->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &nvmeq->sq_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock batched_entropy_u8.lock irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#1605 irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 &child->perf_event_mutex &rcu_state.expedited_wq irq_context: 0 &child->perf_event_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &child->perf_event_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &child->perf_event_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1603 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#980 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#8 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &base->lock irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tn->lock &rq->__lock irq_context: 0 &ep->mtx fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &ep->mtx fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1603 &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock &ul->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1059 irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex cpu_hotplug_lock wq_pool_mutex &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock rcu_read_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock rcu_read_lock &rq->__lock 
irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_PPPOX irq_context: 0 &hdev->req_lock &hdev->lock &n->list_lock &c->lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &n->list_lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sk_lock-AF_PPPOX slock-AF_PPPOX irq_context: 0 slock-AF_PPPOX irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1603 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1603 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1603 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1603 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy135 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy135 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy135 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem &rcu_state.expedited_wq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) devices_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1606 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1606 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#980 irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1604 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1604 &rq->__lock irq_context: 0 purge_vmap_area_lock &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1604 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#787 irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_node_0 irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rcu_state.expedited_wq irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#787 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#787 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#787 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1379 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1028 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#430 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#978 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#982 irq_context: 0 &q->debugfs_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex rcu_node_0 irq_context: 0 &q->debugfs_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#777 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#773 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &sb->s_type->i_mutex_key#3 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#773 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#773 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &base->lock irq_context: 0 &sig->cred_guard_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_lock_key#8 bit_wait_table + i irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)nfc14_nci_rx_wq#8 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 
rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock kfence_freelist_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock &meta->lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#392 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#981 irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1607 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1607 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1605 
irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1605 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1605 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1605 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem stock_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem key irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem pcpu_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &tbl->lock &____s->seqcount#2 irq_context: 0 rtnl_mutex &tbl->lock &pcp->lock &zone->lock irq_context: 0 rtnl_mutex &tbl->lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 rtnl_mutex &tbl->lock &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem percpu_counters_lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem pcpu_lock stock_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#84 &nsim_trap_data->trap_lock quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#788 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#788 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#788 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#788 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#778 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#778 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#778 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &n->list_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &n->list_lock &c->lock irq_context: 0 kn->active#4 rcu_read_lock &rq->__lock irq_context: 0 kn->active#4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#774 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#774 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#774 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock lweventlist_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->lock lweventlist_lock &n->list_lock &c->lock irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#313 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#313 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#313 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#313 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#309 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#789 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#309 &rq->__lock irq_context: 0 
(wq_completion)nfc4_nci_rx_wq#309 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#789 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#779 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_nat_proto_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#779 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1049 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key/1 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock &wb->work_lock &obj_hash[i].lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock &wb->work_lock &base->lock irq_context: 0 &ret->b_state_lock &journal->j_list_lock &wb->work_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_long (work_completion)(&br->mcast_gc_work) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#779 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#775 irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events 
(work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &tb->tb6_lock nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#775 &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex &rcu_state.expedited_wq irq_context: 0 &dev->mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nfc_devlist_mutex remove_cache_srcu rcu_node_0 irq_context: 0 nfc_devlist_mutex remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 nfc_devlist_mutex remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock sb_writers#4 &c->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#775 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex irq_context: 0 rtnl_mutex netpoll_srcu &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#307 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1608 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nf_hook_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &rcu_state.expedited_wq irq_context: 0 misc_mtx &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1608 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1608 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1608 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1606 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1606 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1609 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1609 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1609 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 
&rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex netpoll_srcu &cfs_rq->removed.lock irq_context: 0 rtnl_mutex netpoll_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex netpoll_srcu pool_lock#2 irq_context: 0 &p->lock key irq_context: 0 &p->lock pcpu_lock irq_context: 0 &p->lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem percpu_counters_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1609 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1609 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1609 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1607 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1607 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1607 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1607 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1610 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1610 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1610 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock percpu_counters_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1610 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1608 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1608 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1608 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &journal->j_state_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &journal->j_state_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1608 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &journal->j_state_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &journal->j_state_lock &base->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &wg->device_update_lock &handshake->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &journal->j_state_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &meta->lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) kfence_freelist_lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &ep->mtx rcu_node_0 irq_context: 0 &ep->mtx &rcu_state.expedited_wq irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem 
&obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ep->mtx &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ep->mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &ep->mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET slock-AF_INET#2 pool_lock#2 irq_context: 0 kn->active#46 remove_cache_srcu irq_context: 0 kn->active#46 remove_cache_srcu &rq->__lock irq_context: 0 kn->active#46 remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1049 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1051 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg0#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 rtnl_mutex dev_addr_sem team->team_lock_key#76 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg2#78 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 &____s->seqcount irq_context: softirq rcu_read_lock rcu_read_lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg2#154 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &____s->seqcount#2 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &____s->seqcount irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &n->list_lock irq_context: 0 sb_writers#3 jbd2_handle &rcu_state.expedited_wq irq_context: 0 sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg2#155 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 lweventlist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#8 irq_context: 0 &dev->master_mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 &dev->master_mutex uevent_sock_mutex &rq->__lock irq_context: 0 &dev->master_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 tomoyo_ss &rq->__lock irq_context: 0 sb_writers#5 tomoyo_ss &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fs_reclaim &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_tx_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#7 irq_context: 0 (wq_completion)wg-crypt-wg0#71 
(work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy163 irq_context: 0 rtnl_mutex sk_lock-AF_INET batched_entropy_u8.lock irq_context: 0 rtnl_mutex sk_lock-AF_INET kfence_freelist_lock irq_context: 0 rtnl_mutex sk_lock-AF_INET &meta->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#428 irq_context: 0 &p->lock &of->mutex kn->active#4 &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 stock_lock irq_context: 0 (wq_completion)nfc34_nci_tx_wq#6 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 rcu_read_lock &pool->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#155 irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex.wait_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 &____s->seqcount#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire &____s->seqcount#2 irq_context: 0 misc_mtx pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1200 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1200 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1201 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &n->list_lock &c->lock irq_context: 0 &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#107 irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag6_mutex &rq->__lock irq_context: softirq &(&krcp->monitor_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &obj_hash[i].lock pool_lock irq_context: 0 pernet_ops_rwsem 
nf_ct_proto_mutex defrag6_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)nfc37_nci_cmd_wq#5 irq_context: 0 &dev->mode_config.mutex &rq->__lock irq_context: 0 &dev->mode_config.mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#429 irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc37_nci_rx_wq#5 irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 key irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#106 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#11 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &rq->__lock irq_context: 0 &q->sysfs_dir_lock &q->sysfs_lock &q->debugfs_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#138 (work_completion)(&peer->transmit_handshake_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = 
(typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock irq_context: hardirq rcu_state.barrier_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: hardirq rcu_state.barrier_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &rq->__lock irq_context: 0 fs_reclaim &rcu_state.expedited_wq irq_context: 0 fs_reclaim &rcu_state.expedited_wq &p->pi_lock irq_context: 0 fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 fs_reclaim &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX slock-AF_PPPOX irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX chan_lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock irq_context: 0 (wq_completion)nfc37_nci_rx_wq#5 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 rtnl_mutex &idev->mc_lock 
kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 rtnl_mutex nf_hook_mutex &____s->seqcount#2 irq_context: 0 rtnl_mutex nf_hook_mutex &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 stock_lock irq_context: 0 sb_writers#4 key irq_context: 0 sb_writers#4 pcpu_lock irq_context: 0 sb_writers#4 percpu_counters_lock irq_context: 0 sb_writers#4 pcpu_lock stock_lock irq_context: 0 sb_writers#4 &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem rtnl_mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#479 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx &rq->__lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tsk->futex_exit_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 &f->f_pos_lock sb_writers#4 &n->list_lock &c->lock irq_context: 0 &f->f_pos_lock sb_writers#4 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 &f->f_pos_lock sb_writers#4 rcu_read_lock &rq->__lock irq_context: 0 &f->f_pos_lock sb_writers#4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 rtnl_mutex wq_pool_mutex &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2 &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 
(wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock kfence_freelist_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#158 irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 sb_writers#9 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 pidmap_lock &obj_hash[i].lock irq_context: 0 pidmap_lock pool_lock#2 irq_context: hardirq rcu_state.barrier_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex &bat_priv->softif_vlan_list_lock &c->lock irq_context: 0 rtnl_mutex &bat_priv->softif_vlan_list_lock &n->list_lock irq_context: 0 rtnl_mutex &bat_priv->softif_vlan_list_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle bit_wait_table + i irq_context: hardirq rcu_state.barrier_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex 
kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex kernfs_idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 sb_writers#4 fs_reclaim &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex nf_hook_mutex &n->list_lock irq_context: 0 rtnl_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &rcu_state.expedited_wq &p->pi_lock 
&rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 tty_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &tty->termios_rwsem rcu_read_lock &rq->__lock irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &tty->termios_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock pool_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; 
} while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) pool_lock#2 irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2 &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2 &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#156 irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned 
long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 rtnl_mutex &br->hash_lock &n->list_lock irq_context: 0 rtnl_mutex &br->hash_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)nfc30_nci_rx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc7_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#10 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 stack_depot_init_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex bpf_devs_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy164 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex bpf_devs_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex bpf_devs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex bpf_devs_lock rcu_read_lock 
rhashtable_bucket irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy163 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1071 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1071 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1071 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1073 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1073 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1073 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#430 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#135 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1076 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1076 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 rcu_read_lock &xa->xa_lock#9 key#13 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1077 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1077 &rq->__lock 
irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1078 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1078 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#136 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#136 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1079 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1080 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1089 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#442 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#442 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1082 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1082 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1084 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1084 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#443 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#443 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#444 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#444 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#133 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1089 irq_context: 0 rtnl_mutex rcu_state.exp_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1086 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1086 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1087 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1087 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1087 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1089 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#445 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#445 irq_context: 0 misc_mtx nfc_devlist_mutex batched_entropy_u8.lock crngs.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1088 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#139 irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; 
__ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#139 &rq->__lock irq_context: softirq (&timer) rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex sched_map-wait-type-override &pool->lock &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&net->ipv6.addr_chk_work)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#139 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#139 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#136 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock 
pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock _xmit_ETHER &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &n->list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1090 irq_context: 0 kn->active#50 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->lock &c->lock irq_context: 0 kn->active#52 &n->list_lock irq_context: 0 kn->active#52 &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1090 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#446 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#446 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#440 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1091 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1092 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex triggers_list_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex triggers_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1092 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1093 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1093 &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock remove_cache_srcu pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1093 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg0#80 irq_context: 0 cb_lock genl_mutex batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#474 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1137 irq_context: 0 rtnl_mutex devnet_rename_sem batched_entropy_u8.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle &ret->b_state_lock bit_wait_table + i irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex devnet_rename_sem kfence_freelist_lock irq_context: 0 rtnl_mutex devnet_rename_sem &meta->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex uevent_sock_mutex.wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1138 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock stock_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock key irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim 
mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.gp_wq irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#49 &____s->seqcount#2 irq_context: 0 kn->active#49 &____s->seqcount irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ep->mtx &____s->seqcount#2 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &rnp->exp_wq[0] &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock rcu_read_lock &pool->lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock pcpu_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1139 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1139 &rq->__lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#592 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#592 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events deferred_process_work rtnl_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1141 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1141 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1141 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 ebt_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1094 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1095 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &____s->seqcount irq_context: 0 rtnl_mutex dev_addr_sem team->team_lock_key#82 irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); 
})->work) &list->lock#14 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1095 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &meta->lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 kfence_freelist_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1139 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1139 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#149 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#149 irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rcu_state.expedited_wq irq_context: 0 rtnl_mutex uevent_sock_mutex quarantine_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &____s->seqcount irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &rcu_state.expedited_wq 
&p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#449 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#449 &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 &rq->__lock irq_context: 0 &ep->mtx fill_pool_map-wait-type-override &c->lock irq_context: 0 &ep->mtx fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 kn->active#15 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &meta->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &n->list_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rcu_read_lock batched_entropy_u8.lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &rnp->exp_wq[3] irq_context: 0 cb_lock genl_mutex rcu_read_lock kfence_freelist_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_node_0 irq_context: 0 rtnl_mutex uevent_sock_mutex &rcu_state.expedited_wq irq_context: 0 rtnl_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock 
genl_mutex rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_node_0 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock kfence_freelist_lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rnp->exp_wq[3] irq_context: 0 &vma->vm_lock->lock fs_reclaim &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1096 irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &c->lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex pool_lock#2 irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &dentry->d_lock &dentry->d_lock/1 &lru->node[i].lock rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &n->list_lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#141 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 
(wq_completion)events_long (work_completion)(&(&ipvs->defense_work)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#141 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1098 irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem key irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem pcpu_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem percpu_counters_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override pool_lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&(&hwstats->traffic_dw)->work) &hwstats->hwsdev_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#49 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 &vma->vm_lock->lock &rcu_state.expedited_wq irq_context: 0 &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &net->ipv6.addrconf_hash_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &meta->lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &____s->seqcount irq_context: 0 &dev->mode_config.mutex 
crtc_ww_class_acquire crtc_ww_class_mutex &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock batched_entropy_u8.lock irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock kfence_freelist_lock irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock &n->list_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1098 irq_context: 0 cb_lock genl_mutex rtnl_mutex rcu_node_0 irq_context: 0 rtnl_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock &macvlan_netdev_addr_lock_key &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg0#161 irq_context: 0 rtnl_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1101 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1101 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#144 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1102 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1102 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &tb->tb6_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1103 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1103 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &____s->seqcount irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &n->list_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &n->lock irq_context: 0 (wq_completion)events_power_efficient 
(work_completion)(&(&tbl->gc_work)->work) &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock nl_table_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock nl_table_wait.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock rcu_read_lock lock#8 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock rcu_read_lock id_table_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &dir->lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock krc.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock &____s->seqcount#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1104 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1104 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1104 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#455 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#455 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#449 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1105 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1105 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 &tsk->futex_exit_mutex &rcu_state.expedited_wq irq_context: 0 &tsk->futex_exit_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &tsk->futex_exit_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &tsk->futex_exit_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 vlan_ioctl_mutex &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock 
&obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#82 lweventlist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1106 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1106 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1106 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1107 irq_context: 0 rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock rcu_node_0 irq_context: 0 &hdev->req_lock &hdev->lock &pcp->lock &zone->lock irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu pool_lock#2 irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock stock_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock key irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock pcpu_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock percpu_counters_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock pcpu_lock stock_lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock pool_lock#2 irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &____s->seqcount#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &____s->seqcount irq_context: 0 &type->s_umount_key#46/1 &sb->s_type->i_mutex_key#18 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#82 rtnl_mutex &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &base->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &base->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &base->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &base->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &base->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock fs_reclaim rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &xt[i].mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET &dccp_hashinfo.bhash[i].lock &dccp_hashinfo.bhash2[i].lock &n->list_lock &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 nf_sockopt_mutex &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &root->kernfs_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &ep->mtx fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) 
(__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1059 irq_context: 0 kn->active#15 remove_cache_srcu irq_context: 0 kn->active#15 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#15 remove_cache_srcu &c->lock irq_context: 0 kn->active#15 remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 kn->active#15 remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 misc_mtx rcu_read_lock &pool->lock (worker)->lock irq_context: 0 (wq_completion)wg-kex-wg1#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&net->ipv6.addr_chk_work)->timer irq_context: softirq &(&net->ipv6.addr_chk_work)->timer rcu_read_lock &pool->lock irq_context: softirq &(&net->ipv6.addr_chk_work)->timer rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)kblockd (work_completion)(&(&q->requeue_work)->work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: softirq rcu_callback put_task_map-wait-type-override fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#456 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET &base->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &base->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: 
&qdisc_tx_busylock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#457 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 &rq->__lock irq_context: 0 &dev->mutex kn->active#4 rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex kn->active#4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->dev_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1110 irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) nfc_devlist_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1110 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1110 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1110 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex 
&rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wq->mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1051 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ebt_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &ht->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override kfence_freelist_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &p->pi_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rcu_state.expedited_wq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dpm_list_mtx &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&cache_cleaner)->timer rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 
(wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#957 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#520 irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock &meta->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock kfence_freelist_lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 irq_context: 0 (wq_completion)events (work_completion)(&(&group->avgs_work)->work) &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &tbl->lock nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)events_unbound rcu_node_0 irq_context: 0 (wq_completion)events_unbound &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) 
*)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1111 irq_context: 0 &dentry->d_lock rcu_read_lock &sb->s_type->i_lock_key#24 &p->pi_lock irq_context: 0 &dentry->d_lock rcu_read_lock &sb->s_type->i_lock_key#24 &p->pi_lock &rq->__lock irq_context: 0 &dentry->d_lock rcu_read_lock &sb->s_type->i_lock_key#24 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1111 irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &dccp_hashinfo.bhash[i].lock &meta->lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_INET#2 &dccp_hashinfo.bhash[i].lock kfence_freelist_lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock &meta->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock kfence_freelist_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#520 irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1111 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1111 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1112 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ipvs->est_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1112 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1112 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1113 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#460 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#460 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#454 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1113 irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock 
&rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) devices_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &rnp->exp_wq[2] irq_context: 0 &dev->dev_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1114 irq_context: softirq &tx->clean_lock &base->lock irq_context: softirq &tx->clean_lock &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mdev->req_queue_mutex quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &dev->dev_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: softirq &(&bat_priv->dat.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1115 irq_context: 0 rtnl_mutex fib_info_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1115 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1116 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1116 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1118 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1118 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1118 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1118 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1120 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1120 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#463 irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex 
&rq->__lock cpu_asid_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1120 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#146 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#141 irq_context: 0 tty_mutex rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1121 irq_context: 0 &dev->mode_config.mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem rcu_node_0 irq_context: 0 &dev->mode_config.mutex &obj_hash[i].lock irq_context: 0 &mdev->req_queue_mutex vim2m:1183:(hdl)->_lock &rq->__lock irq_context: 0 &dev->mode_config.mutex pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#65 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 tty_mutex &rcu_state.expedited_wq irq_context: 0 sound_loader_lock irq_context: 0 tty_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 tty_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sound_oss_mutex irq_context: 0 tty_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &rmidi->open_mutex irq_context: 0 misc_mtx nfc_devlist_mutex subsys mutex#39 &rq->__lock irq_context: 0 &rmidi->open_mutex fs_reclaim irq_context: 0 &mm->mmap_lock fs_reclaim &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock fs_reclaim &obj_hash[i].lock irq_context: 0 &mm->mmap_lock fs_reclaim pool_lock#2 irq_context: 0 &rmidi->open_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &rmidi->open_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &rmidi->open_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &rmidi->open_mutex pool_lock#2 irq_context: 0 &rmidi->open_mutex &card->files_lock irq_context: 0 &rmidi->open_mutex &rmidi->open_wait irq_context: 0 &rmidi->open_mutex &card->ctl_files_rwlock irq_context: 0 &rmidi->open_mutex &obj_hash[i].lock irq_context: 0 &rmidi->open_mutex &c->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &____s->seqcount#2 irq_context: 0 &mdev->req_queue_mutex vim2m:1183:(hdl)->_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: 
&qdisc_tx_busylock &base->lock &obj_hash[i].lock irq_context: 0 &rmidi->open_mutex &n->list_lock irq_context: 0 &rmidi->open_mutex &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh _xmit_ETHER#2 quarantine_lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &base->lock irq_context: 0 &rmidi->open_mutex &rdev->filelist_sem irq_context: 0 &rmidi->open_mutex &rdev->filelist_sem &rdev->filelist_lock irq_context: 0 &rmidi->open_mutex &substream->lock irq_context: 0 &rmidi->open_mutex (work_completion)(&runtime->event_work) irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &tn->lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET fill_pool_map-wait-type-override pool_lock irq_context: 0 &rmidi->open_wait irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rnp->exp_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rnp->exp_wq[3] irq_context: 0 &card->files_lock irq_context: 0 &card->files_lock shutdown_lock irq_context: 0 &card->files_lock &card->remove_sleep irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 &x->wait#8 &p->pi_lock irq_context: 0 &x->wait#8 &p->pi_lock &rq->__lock irq_context: 0 &x->wait#8 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &vcapture->lock &lock->wait_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &anon_vma->rwsem fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 &vcapture->lock &cfs_rq->removed.lock irq_context: 0 &vcapture->lock &obj_hash[i].lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex stock_lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex key irq_context: 0 vlan_ioctl_mutex rtnl_mutex pcpu_lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex percpu_counters_lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex pcpu_lock stock_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 &vcapture->lock pool_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &rmidi->open_mutex quarantine_lock irq_context: 0 &mm->mmap_lock sb_writers#3 jbd2_handle &cfs_rq->removed.lock irq_context: 0 
&mm->mmap_lock sb_writers#3 jbd2_handle &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &rmidi->open_mutex &____s->seqcount#2 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &rmidi->open_mutex &____s->seqcount irq_context: 0 &rmidi->open_mutex &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 rcu_node_0 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rcu_state.expedited_wq irq_context: 0 br_ioctl_mutex rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &rmidi->open_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 devpts_mutex &xa->xa_lock#21 &c->lock irq_context: 0 devpts_mutex &xa->xa_lock#21 pool_lock#2 irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &o_tty->termios_rwsem/1 purge_vmap_area_lock &pcp->lock &zone->lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &o_tty->termios_rwsem/1 purge_vmap_area_lock &____s->seqcount irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &o_tty->termios_rwsem/1 &rq->__lock irq_context: 0 &u->iolock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &u->iolock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &u->iolock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &u->iolock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 br_ioctl_mutex rtnl_mutex.wait_lock irq_context: 0 br_ioctl_mutex &p->pi_lock irq_context: 0 &tty->legacy_mutex &tty->legacy_mutex/1 &tty->ldisc_sem &o_tty->termios_rwsem/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&tty->hangup_work)#2 rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &cfs_rq->removed.lock irq_context: 0 &tty->ldisc_sem 
&tty->ldisc_sem/1 &rq->__lock irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 videodev_lock &rq->__lock irq_context: 0 videodev_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 tty_mutex (work_completion)(&buf->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &tty->ldisc_sem &tty->ldisc_sem/1 &tty->termios_rwsem rcu_node_0 irq_context: 0 tty_mutex &xa->xa_lock#21 &obj_hash[i].lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 tty_mutex &xa->xa_lock#21 pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1121 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &node->ep_lock &rq->__lock irq_context: 0 &node->ep_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1121 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1122 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 kn->active#48 &kernfs_locks->open_file_mutex[count] &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1122 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&gc_work->dwork)->work) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->master_mutex fs_reclaim &rq->__lock irq_context: 0 &dev->master_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter fs_reclaim mmu_notifier_invalidate_range_start 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &rq->__lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex &rq->__lock &base->lock irq_context: 0 &xt[i].mutex &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events free_ipc_work sysctl_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1034 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1034 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1032 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1032 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1033 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1033 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1033 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1034 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#414 irq_context: 0 pernet_ops_rwsem rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#414 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#408 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1034 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1034 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1035 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#406 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1035 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1035 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1035 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1036 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1036 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1036 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1036 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1037 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#415 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#415 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#409 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#407 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1037 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim percpu_counters_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1037 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1037 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1037 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1037 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1038 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1038 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1038 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#416 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#416 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#416 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#416 irq_context: 0 &mdev->req_queue_mutex &dev->dev_mutex quarantine_lock irq_context: 0 &f->f_pos_lock sb_writers#4 remove_cache_srcu irq_context: 0 &f->f_pos_lock sb_writers#4 remove_cache_srcu quarantine_lock irq_context: 0 &f->f_pos_lock sb_writers#4 remove_cache_srcu &c->lock irq_context: 0 &f->f_pos_lock sb_writers#4 remove_cache_srcu &n->list_lock irq_context: 0 &f->f_pos_lock sb_writers#4 
remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#410 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#408 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &____s->seqcount#2 irq_context: 0 &data->open_mutex triggers_list_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key/1 &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1038 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1038 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1038 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex bpf_devs_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1038 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1039 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#417 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#417 &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#417 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#417 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#411 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#409 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#409 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#409 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1039 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1040 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1040 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 batched_entropy_u8.lock crngs.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#4 &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1040 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1040 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock lock kernfs_idr_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1041 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &simple_offset_xa_lock kfence_freelist_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1041 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1041 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#418 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#418 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#418 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#418 irq_context: 0 tomoyo_ss mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#412 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#410 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1041 
irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1041 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1041 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1042 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1042 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1042 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#419 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#419 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#413 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#411 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1042 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1042 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1042 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1043 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1043 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_tx_wq#412 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_tx_wq#412 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_tx_wq#412 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#414 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#420 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#420 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#420 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1043 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1043 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1043 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1043 &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#9 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1043 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1043 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1043 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1044 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#421 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#421 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#421 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#421 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#415 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#413 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1044 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1044 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1044 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1045 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1045 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1045 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1045 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1046 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1046 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1046 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 
0 (wq_completion)nfc2_nci_tx_wq#1046 &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#422 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock &tbl->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#422 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#422 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#422 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#416 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex &cfs_rq->removed.lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex &obj_hash[i].lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &wq->mutex pool_lock#2 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#414 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#414 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#414 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1046 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex batched_entropy_u8.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1046 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#423 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#423 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#417 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#415 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1046 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1047 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1047 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1047 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1047 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1048 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#424 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#424 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#514 irq_context: 0 &mm->mmap_lock fs_reclaim &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock fs_reclaim &obj_hash[i].lock irq_context: 0 &mm->mmap_lock fs_reclaim pool_lock#2 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#512 irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 key irq_context: 0 &type->i_mutex_dir_key#4 pcpu_lock irq_context: 0 &type->i_mutex_dir_key#4 percpu_counters_lock irq_context: 0 &f->f_pos_lock &p->lock stock_lock irq_context: 0 &f->f_pos_lock &p->lock capidev_list_lock irq_context: hardirq rcu_state.barrier_lock fill_pool_map-wait-type-override &n->list_lock irq_context: hardirq rcu_state.barrier_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#418 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#416 irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &base->lock irq_context: 0 (wq_completion)events pcpu_balance_work pcpu_alloc_mutex &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 
&ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1048 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1048 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1048 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key#8 &p->pi_lock irq_context: 0 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key#8 &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key#8 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &c->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->tx_work) &list->lock#5 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 pernet_ops_rwsem rtnl_mutex stock_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex pcpu_lock stock_lock irq_context: 0 rtnl_mutex dev_addr_sem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock &lock->wait_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock sched_map-wait-type-override &pool->lock irq_context: 0 &xt[i].mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &idev->mc_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&conn->pending_rx_work) irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1123 irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rcu_read_lock 
&rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1140 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1143 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock irq_context: 0 kn->active#13 remove_cache_srcu quarantine_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 pcpu_lock irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 percpu_counters_lock irq_context: 0 cb_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock &sta->rate_ctrl_lock kfence_freelist_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu pcpu_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#149 
(work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &c->lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#149 
(work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#392 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#392 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex rcu_read_lock &rq->__lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#392 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#392 &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#392 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 smack_known_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#386 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#384 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#119 irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#147 irq_context: 0 sb_writers#3 sb_internal key irq_context: 0 sb_writers#3 sb_internal pcpu_lock irq_context: 0 sb_writers#3 sb_internal percpu_counters_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ 
unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 cgroup_threadgroup_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) 
*)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1143 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1143 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#119 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 &mm->mmap_lock sb_writers#5 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#5 &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock sb_writers#5 &obj_hash[i].lock irq_context: 0 &mm->mmap_lock sb_writers#5 pool_lock#2 irq_context: 0 
(wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 sb_writers#6 key irq_context: 0 sb_writers#6 pcpu_lock irq_context: 0 sb_writers#6 percpu_counters_lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (reaper_work).work &base->lock irq_context: 0 (wq_completion)events_unbound (reaper_work).work &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &ei->xattr_sem irq_context: 0 &ei->xattr_sem mmu_notifier_invalidate_range_start irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &mapping->i_mmap_rwsem irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&(&ssp->srcu_sup->work)->work) pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex batched_entropy_u8.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex kfence_freelist_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu quarantine_lock irq_context: 0 &sb->s_type->i_lock_key &xa->xa_lock#9 irq_context: 0 &info->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &meta->lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &journal->j_state_lock 
irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle irq_context: 0 &ndev->req_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &sbi->s_orphan_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_raw_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle tk_core.seq.seqcount irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock jbd2_handle &journal->j_wait_updates irq_context: 0 &sb->s_type->i_mutex_key#8 mapping.invalidate_lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start irq_context: 0 &sb->s_type->i_mutex_key#8 jbd2_handle irq_context: 0 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &rq->__lock cpu_asid_lock irq_context: 0 &sb->s_type->i_mutex_key#8 jbd2_handle &ei->i_raw_lock irq_context: 0 &sb->s_type->i_mutex_key#8 jbd2_handle &journal->j_wait_updates irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#8 &sb->s_type->i_lock_key#22 irq_context: 0 &sb->s_type->i_mutex_key#8 &wb->list_lock irq_context: 0 rtnl_mutex uevent_sock_mutex &obj_hash[i].lock pool_lock irq_context: 0 &sb->s_type->i_mutex_key#8 &wb->list_lock &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 fs_reclaim irq_context: 0 sb_writers#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &zone->lock irq_context: 0 sb_writers#3 &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 rcu_read_lock &____s->seqcount#4 irq_context: 0 sb_writers#3 &mm->mmap_lock irq_context: 0 sb_writers#3 &mm->mmap_lock &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &cfs_rq->removed.lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 pool_lock#2 irq_context: 0 sb_writers#3 &p->alloc_lock irq_context: 0 sb_writers#3 rcu_read_lock &____s->seqcount#3 irq_context: 0 sb_writers#3 rcu_read_lock rcu_read_lock mount_lock.seqcount 
irq_context: 0 sb_writers#3 rcu_read_lock rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 stock_lock irq_context: 0 sb_writers#3 &f->f_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &xa->xa_lock#9 stock_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_node_0 irq_context: 0 rtnl_mutex smc_ib_devices.mutex &rq->__lock irq_context: 0 rtnl_mutex smc_ib_devices.mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex subsys mutex#20 &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &n->list_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &n->list_lock &c->lock irq_context: 0 &pipe->mutex/1 &pipe->rd_wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &vma->vm_lock->lock remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &n->list_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &n->list_lock &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 lock#4 &obj_hash[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock fs_reclaim irq_context: 0 sb_writers#3 &mm->mmap_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#3 &mm->mmap_lock &____s->seqcount irq_context: 0 sb_writers#3 &mm->mmap_lock stock_lock irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#9 irq_context: 0 sb_writers#3 &mm->mmap_lock &sb->s_type->i_lock_key irq_context: 0 sb_writers#3 &mm->mmap_lock &info->lock irq_context: 0 sb_writers#5 rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock lock#4 irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 key irq_context: 0 sb_writers#3 &mm->mmap_lock lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#3 &mm->mmap_lock &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &mm->mmap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#9 stock_lock irq_context: 0 sb_writers#4 &mm->mmap_lock &rq->__lock irq_context: 0 sb_writers#4 &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock &xa->xa_lock#9 pool_lock#2 irq_context: 0 sb_writers#3 &mm->mmap_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock key irq_context: 0 sb_writers#3 &mm->mmap_lock pcpu_lock irq_context: 0 sb_writers#3 &mm->mmap_lock percpu_counters_lock irq_context: 0 sb_writers#3 &mm->mmap_lock pool_lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &____s->seqcount#2 irq_context: 0 sb_writers#3 
&mm->mmap_lock &xa->xa_lock#9 &c->lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock fs_reclaim &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock ptlock_ptr(ptdesc)#2 irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock key irq_context: 0 &u->iolock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 sb_writers#3 &sb->s_type->i_mutex_key#8 &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq 
&p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET6 stock_lock irq_context: 0 sk_lock-AF_INET6 key irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 &meta->lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &mm->mmap_lock rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 cb_lock &rq->__lock cpu_asid_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &sbi->s_orphan_lock &mapping->i_private_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#119 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 sb_writers#3 &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &sfilter->notify_lock &meta->lock irq_context: 0 &sfilter->notify_lock kfence_freelist_lock irq_context: 0 &u->iolock stock_lock irq_context: 0 &u->iolock key irq_context: 0 &u->iolock pcpu_lock irq_context: 0 &u->iolock percpu_counters_lock irq_context: 0 &child->perf_event_mutex &cfs_rq->removed.lock irq_context: 0 &child->perf_event_mutex &obj_hash[i].lock irq_context: 0 &u->iolock pcpu_lock stock_lock irq_context: 0 &child->perf_event_mutex pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &sbi->s_md_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &sbi->s_md_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &ei->i_prealloc_lock irq_context: 0 &sbi->s_writepages_rwsem 
jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &pa->pa_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &(ei->i_block_reservation_lock) irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock pcpu_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock &obj_hash[i].lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock &c->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock percpu_counters_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock &blkcg->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_read_lock &q->queue_lock &blkcg->lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_raw_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock rcu_node_0 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock &pa->pa_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock rcu_node_0 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem key#3 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &(ei->i_block_reservation_lock) key#15 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &xa->xa_lock#9 key#11 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock key#11 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle tk_core.seq.seqcount irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &c->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &____s->seqcount irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &base->lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) &base->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &lock->wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &p->pi_lock &rq->__lock irq_context: 0 &tsk->futex_exit_mutex &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &tsk->futex_exit_mutex 
&mm->mmap_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rtnl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock fs_reclaim &rq->__lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock pool_lock#2 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem pcpu_lock stock_lock irq_context: 0 sb_writers#6 &rq->__lock &cfs_rq->removed.lock irq_context: softirq security/integrity/ima/ima_queue_keys.c:35 irq_context: softirq security/integrity/ima/ima_queue_keys.c:35 rcu_read_lock &pool->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle pcpu_lock stock_lock irq_context: softirq security/integrity/ima/ima_queue_keys.c:35 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq security/integrity/ima/ima_queue_keys.c:35 rcu_read_lock &pool->lock &p->pi_lock irq_context: softirq security/integrity/ima/ima_queue_keys.c:35 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq security/integrity/ima/ima_queue_keys.c:35 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq (&peer->timer_send_keepalive) &n->list_lock &c->lock irq_context: 0 (wq_completion)events (ima_keys_delayed_work).work irq_context: 0 (wq_completion)events (ima_keys_delayed_work).work ima_keys_lock irq_context: 0 (wq_completion)events (ima_keys_delayed_work).work &obj_hash[i].lock irq_context: 0 (wq_completion)events (ima_keys_delayed_work).work pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &c->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &retval->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock tk_core.seq.seqcount irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle rcu_read_lock &n->list_lock &c->lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock rcu_node_0 irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &dev->mode_config.idr_mutex irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &rq->__lock irq_context: softirq rcu_read_lock &br->multicast_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &c->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &retval->lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &n->list_lock irq_context: 0 &sbi->s_writepages_rwsem rcu_read_lock &n->list_lock &c->lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock key#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &____s->seqcount irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle 
&ei->i_data_sem &xa->xa_lock#9 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &xa->xa_lock#9 stock_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &xa->xa_lock#9 pool_lock#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &rq->__lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem lock#4 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &____s->seqcount#2 irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle lock#4 &lruvec->lru_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &c->lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 &sbi->s_writepages_rwsem jbd2_handle &ei->i_data_sem &ei->i_es_lock &c->lock irq_context: 0 &dev->mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)ext4-rsv-conversion (work_completion)(&ei->i_rsv_conversion_work) jbd2_handle &ei->i_es_lock key#2 irq_context: 0 &sb->s_type->i_mutex_key#9 batched_entropy_u8.lock irq_context: 0 &sb->s_type->i_mutex_key#9 kfence_freelist_lock irq_context: 0 &ei->i_data_sem &ei->i_prealloc_lock &pa->pa_lock#2 irq_context: 0 &mm->mmap_lock stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 &ei->i_data_sem &obj_hash[i].lock irq_context: 0 &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 &xa->xa_lock#9 irq_context: 0 sb_writers#3 &mapping->i_private_lock irq_context: 0 sb_writers#3 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 irq_context: 0 sb_writers#3 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 &obj_hash[i].lock irq_context: 0 sb_writers#3 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 pool_lock#2 irq_context: 0 sb_writers#3 lock#4 irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#479 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 lock#4 &lruvec->lru_lock irq_context: 0 sb_writers#3 lock#5 irq_context: 0 sb_writers#3 &lruvec->lru_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = 
(typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#150 irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_NETLINK &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock stock_lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1312 irq_context: 0 sb_writers#3 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#3 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 sb_writers#3 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &sb->s_type->i_lock_key#22 &xa->xa_lock#9 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_prealloc_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim &obj_hash[i].lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock#2 rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 &xt[i].mutex rcu_read_lock percpu_counters_lock irq_context: 0 &xt[i].mutex rcu_read_lock pcpu_lock stock_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex fs_reclaim pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_raw_lock irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex rcu_node_0 irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex &rcu_state.expedited_wq irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock put_task_map-wait-type-override#3 stock_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock pool_lock#2 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock &sbi->s_es_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex net_rwsem &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock key#6 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem pool_lock#2 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &sbi->s_md_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &sb->s_type->i_lock_key#22 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem key#3 irq_context: 0 sb_writers#3 sb_internal jbd2_handle 
&ei->i_data_sem &c->lock irq_context: 0 drm_connector_list_iter &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] remove_cache_srcu irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] remove_cache_srcu quarantine_lock irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &c->lock irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &n->list_lock irq_context: 0 kn->active#14 &kernfs_locks->open_file_mutex[count] remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ret->b_state_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &ret->b_state_lock &journal->j_list_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_data_sem &journal->j_revoke_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle tk_core.seq.seqcount irq_context: 0 sb_writers#3 sb_internal jbd2_handle &ei->i_prealloc_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &journal->j_list_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 &rnp->exp_lock irq_context: 0 rcu_state.exp_mutex irq_context: 0 rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex.wait_lock irq_context: 0 &sfilter->notify_lock pool_lock#2 irq_context: 0 &hdev->req_lock &hdev->lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 sk_lock-AF_INET6 pcpu_lock irq_context: 0 sk_lock-AF_INET6 percpu_counters_lock irq_context: 0 sk_lock-AF_INET6 pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&peer->transmit_packet_work) irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&(&rdev->dfs_update_channels_wk)->work) rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 sk_lock-AF_NETLINK stock_lock irq_context: 0 sk_lock-AF_NETLINK key irq_context: 0 sk_lock-AF_NETLINK pcpu_lock irq_context: 0 sk_lock-AF_NETLINK percpu_counters_lock irq_context: 0 sk_lock-AF_NETLINK pcpu_lock stock_lock irq_context: 0 sk_lock-AF_NETLINK &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#899 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss rcu_read_lock rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tomoyo_ss &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &dentry->d_lock &wq#2 irq_context: 0 
sb_writers &type->i_mutex_dir_key#2 &sbinfo->stat_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &xa->xa_lock#5 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &xa->xa_lock#5 pool_lock#2 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &sb->s_type->i_lock_key#5 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &s->s_inode_list_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 tk_core.seq.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 batched_entropy_u32.lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &xattrs->lock irq_context: 0 nfc_devlist_mutex rcu_read_lock &rq->__lock irq_context: 0 nfc_devlist_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#899 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#925 irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1379 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#925 irq_context: 0 rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#592 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1305 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1305 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1305 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#394 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem rcu_node_0 irq_context: 0 
sk_lock-AF_INET rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex batched_entropy_u8.lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &rq->__lock irq_context: 0 pernet_ops_rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &n->lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &n->lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock nl_table_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock nl_table_wait.lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock rcu_read_lock lock#8 irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock rcu_read_lock id_table_lock irq_context: 0 &wq->mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock krc.lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &n->list_lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock tk_core.seq.seqcount irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex kfence_freelist_lock irq_context: 0 remove_cache_srcu per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 remove_cache_srcu pcpu_lock stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) 
&rq->__lock &cfs_rq->removed.lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex quarantine_lock irq_context: 0 &dev->clientlist_mutex &cfs_rq->removed.lock irq_context: 0 &dev->clientlist_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1141 irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1142 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#119 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1028 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 
(wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire fs_reclaim irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire &c->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &c->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &dev->mode_config.connector_list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter fs_reclaim irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter pool_lock#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &c->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &crtc->commit_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire 
crtc_ww_class_mutex &x->wait#15 irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&(&kfence_timer)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); 
})->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock &____s->seqcount#5 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim rcu_node_0 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vblank_time_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &base->lock irq_context: 0 
&dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &base->lock &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#116 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#115 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#982 irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PHONET &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 
(wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#982 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#982 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#983 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1144 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1144 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 key irq_context: 0 rtnl_mutex dev_base_lock &xa->xa_lock#4 &obj_hash[i].lock irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim &cfs_rq->removed.lock irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim &obj_hash[i].lock irq_context: 0 &p->lock &of->mutex kn->active#4 fs_reclaim pool_lock#2 irq_context: 0 rtnl_mutex dev_base_lock &xa->xa_lock#4 pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &meta->lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 pcpu_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock quarantine_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 percpu_counters_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 key irq_context: 0 &sig->cred_guard_mutex remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#393 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#393 irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#387 irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 pcpu_lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 percpu_counters_lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 rtnl_mutex &sb->s_type->i_mutex_key#3 pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 sk_lock-AF_INET rcu_read_lock &____s->seqcount#7 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex (&timer.timer) irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex (work_completion)(&vkms_state->composer_work)#2 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &n->list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock 
&dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &n->list_lock &c->lock irq_context: 0 &dev->master_mutex &lock->wait_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &obj_hash[i].lock pool_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &lock->wait_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rcu_state.expedited_wq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 sk_lock-AF_INET rcu_read_lock &nf_nat_locks[i] irq_context: 0 sk_lock-AF_INET rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 sk_lock-AF_INET rcu_read_lock &nf_conntrack_locks[i] batched_entropy_u8.lock irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start key irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock pool_lock#2 irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start percpu_counters_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &____s->seqcount#9 irq_context: 0 crtc_ww_class_mutex irq_context: 0 crtc_ww_class_mutex &mm->mmap_lock irq_context: 0 &dev->mode_config.idr_mutex &mm->mmap_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rlock-AF_INET irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#7 
&of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#74 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem pcpu_lock stock_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &dir->lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem fib_info_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: softirq rcu_read_lock rcu_read_lock &sctp_ep_hashtable[i].lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 &c->lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 crngs.lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 &____s->seqcount irq_context: 0 (wq_completion)nfc3_nci_tx_wq#385 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#120 irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#120 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#117 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#117 &rq->__lock irq_context: 0 kn->active#14 remove_cache_srcu irq_context: 0 kn->active#14 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#14 remove_cache_srcu &c->lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#117 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock &ul->lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#116 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#116 &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->krw_arr[i].rcu_work)->work) rcu_callback &cfs_rq->removed.lock irq_context: 0 kn->active#14 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#14 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex 
&root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem stock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem key irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem percpu_counters_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem &meta->lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem kfence_freelist_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &p->pi_lock &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#116 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &n->list_lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#983 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#983 irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 
(wq_completion)wg-crypt-wg1#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#148 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock krc.lock &base->lock irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#983 irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1051 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1052 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1054 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1054 irq_context: 0 &dev->mode_config.mutex &lock->wait_lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock kfence_freelist_lock irq_context: softirq (&icsk->icsk_retransmit_timer) slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &meta->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#984 irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex sched_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#394 irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex quarantine_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &bat_priv->mcast.mla_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 
&nsim_trap_data->trap_lock &base->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock pool_lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex stock_lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex key irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &nf_nat_locks[i] irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)nfc3_nci_rx_wq#388 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] batched_entropy_u8.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#9 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#386 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#984 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#984 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_RXRPC &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &obj_hash[i].lock pool_lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 sk_lock-AF_INET krc.lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem batched_entropy_u8.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &meta->lock irq_context: 0 sk_lock-AF_INET krc.lock fill_pool_map-wait-type-override &c->lock irq_context: 0 sk_lock-AF_INET krc.lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sk_lock-AF_INET krc.lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu quarantine_lock irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu &c->lock irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu &n->list_lock irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu &____s->seqcount irq_context: 0 &hdev->req_lock &hdev->lock remove_cache_srcu pool_lock#2 irq_context: softirq (&peer->timer_send_keepalive) &n->list_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#984 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#985 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1144 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#150 irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#985 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#150 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#150 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex &q->mmap_lock &rq->__lock irq_context: 0 &mdev->req_queue_mutex &dev_instance->mutex &q->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 br_ioctl_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 br_ioctl_mutex &p->pi_lock &rq->__lock irq_context: 0 br_ioctl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex _xmit_ETHER fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#150 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1128 irq_context: 0 &mm->mmap_lock remove_cache_srcu stock_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#3 batched_entropy_u8.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1145 irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#476 irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)nfc2_nci_rx_wq#985 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#986 irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->power_on) &c->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned 
long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock kfence_freelist_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1201 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1201 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1201 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1201 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1201 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1202 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg1#78 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#521 irq_context: 0 rtnl_mutex stack_depot_init_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &rdev->wiphy.mtx &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#521 &rq->__lock irq_context: 0 vlan_ioctl_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &net->ipv6.ip6addrlbl_table.lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex &rnp->exp_wq[0] irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#521 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#521 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#515 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#515 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#515 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock 
(wq_completion)wg-kex-wg1#155 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#513 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#168 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#168 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1308 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1308 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#46 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#204 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1309 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1309 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#588 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &n->list_lock irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &n->list_lock &c->lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#596 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#206 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#24 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1313 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1313 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1313 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#591 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock stock_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock key irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock pcpu_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock percpu_counters_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock pcpu_lock stock_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex.wait_lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &n->list_lock &c->lock irq_context: 0 rtnl_mutex _xmit_ETHER rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#599 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 
rtnl_mutex &xa->xa_lock#4 irq_context: 0 (wq_completion)bond0#80 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) free_vmap_area_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) free_vmap_area_lock pool_lock#2 irq_context: 0 crtc_ww_class_mutex &mm->mmap_lock &sem->wait_lock irq_context: 0 crtc_ww_class_mutex &mm->mmap_lock &rq->__lock irq_context: 0 crtc_ww_class_mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 &type->s_umount_key#21/1 &n->list_lock irq_context: 0 &type->s_umount_key#21/1 &n->list_lock &c->lock irq_context: 0 &type->s_umount_key#21/1 &rq->__lock irq_context: 0 &type->s_umount_key#21/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock &c->lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &n->list_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &n->list_lock &c->lock irq_context: 0 kn->active#52 &kernfs_locks->open_file_mutex[count] &c->lock irq_context: 0 drm_unplug_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 rtnl_mutex _xmit_ETHER/1 _xmit_ETHER &c->lock irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)bond0#80 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#80 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)bond0#80 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex.wait_lock irq_context: 0 (wq_completion)bond0#80 (work_completion)(&(&slave->notify_work)->work) &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#207 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#207 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1316 irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock 
fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#208 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#601 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#601 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#154 irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 &xt[i].mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &xt[i].mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 key irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 pcpu_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 percpu_counters_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 pcpu_lock stock_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#154 
(work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#155 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &br->multicast_lock &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1319 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1319 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#155 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#155 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem batched_entropy_u8.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex rcu_read_lock key#26 irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 cb_lock genl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 rtnl_mutex &idev->mc_lock fs_reclaim &rq->__lock irq_context: 0 (wq_completion)bond0#72 irq_context: 0 (wq_completion)bond0#72 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#72 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#72 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#72 (work_completion)(&(&slave->notify_work)->work) 
&base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#395 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#395 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#395 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex deferred_probe_mutex &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#395 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#395 &obj_hash[i].lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock ptlock_ptr(ptdesc)#2 &folio_wait_table[i] &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#395 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 &meta->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 kfence_freelist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#389 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#389 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#389 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#986 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#986 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#986 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock stock_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock key irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#10 rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#387 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#986 irq_context: 0 rtnl_mutex team->team_lock_key#72 fs_reclaim irq_context: 0 rtnl_mutex team->team_lock_key#72 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex team->team_lock_key#72 netpoll_srcu irq_context: 0 ebt_mutex rcu_read_lock &rq->__lock irq_context: 0 ebt_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#72 net_rwsem irq_context: 0 rtnl_mutex 
team->team_lock_key#72 net_rwsem &list->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-sk_lock-AF_INET &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX &x->wait#3 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_PPPOX clock-AF_PPPOX irq_context: 0 rtnl_mutex team->team_lock_key#72 &tn->lock irq_context: 0 &sb->s_type->i_mutex_key#10 slock-AF_PPPOX irq_context: 0 rtnl_mutex team->team_lock_key#72 _xmit_ETHER irq_context: 0 rtnl_mutex team->team_lock_key#72 &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#72 input_pool.lock irq_context: 0 rtnl_mutex team->team_lock_key#72 rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#72 &obj_hash[i].lock irq_context: 0 &dev->mutex uevent_sock_mutex mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex stock_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex pcpu_lock stock_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 nl_table_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 nl_table_wait.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &sem->wait_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex team->team_lock_key#72 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#72 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#72 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#72 &in_dev->mc_tomb_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 &im->lock irq_context: 0 kn->active#16 remove_cache_srcu irq_context: 0 kn->active#16 remove_cache_srcu quarantine_lock irq_context: 0 kn->active#16 remove_cache_srcu &c->lock irq_context: 0 kn->active#16 remove_cache_srcu &rq->__lock irq_context: 0 kn->active#16 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#72 cbs_list_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#72 sysfs_symlink_target_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#72 &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#72 lock irq_context: 0 rtnl_mutex team->team_lock_key#72 lock kernfs_idr_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 &root->kernfs_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#72 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#72 quarantine_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 remove_cache_srcu irq_context: 0 rtnl_mutex team->team_lock_key#72 remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex 
team->team_lock_key#72 remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#72 remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#72 remove_cache_srcu pool_lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#72 lweventlist_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 lweventlist_lock &dir->lock#2 irq_context: 0 kn->active#16 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#16 remove_cache_srcu &obj_hash[i].lock irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem &cfs_rq->removed.lock irq_context: 0 rtnl_mutex (inet6addr_validator_chain).rwsem &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &p->pi_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 &xt[i].mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &xt[i].mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 (console_sem).lock irq_context: 0 rtnl_mutex team->team_lock_key#72 console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 console_lock console_srcu console_owner irq_context: 0 rtnl_mutex team->team_lock_key#72 console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex team->team_lock_key#72 console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#72 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rcu_read_lock &data->fib_event_queue_lock irq_context: 0 kn->active#4 &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex team->team_lock_key#72 &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fill_pool_map-wait-type-override &n->list_lock 
irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &type->s_umount_key#47 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &type->s_umount_key#47 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &type->s_umount_key#47 &dentry->d_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &dentry->d_lock rcu_read_lock &p->pi_lock irq_context: 0 &dentry->d_lock rcu_read_lock &sb->s_type->i_lock_key#23 &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#72 pool_lock#2 irq_context: 0 &xt[i].mutex &mm->mmap_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &mm->mmap_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle stock_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex quarantine_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle pcpu_lock stock_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock key irq_context: 0 &ep->mtx fs_reclaim &rq->__lock irq_context: 0 &ep->mtx fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &mm->mmap_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#986 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#986 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#986 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#987 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#987 irq_context: 0 (wq_completion)hci0#12 
(work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#233 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#233 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1374 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1374 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1374 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1374 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1197 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 rtnl_mutex dev_addr_sem stock_lock irq_context: 0 rtnl_mutex dev_addr_sem key irq_context: 0 rtnl_mutex dev_addr_sem pcpu_lock irq_context: 0 rtnl_mutex dev_addr_sem percpu_counters_lock irq_context: 0 rtnl_mutex dev_addr_sem pcpu_lock stock_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &br->lock lweventlist_lock &c->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &base->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &ndev->lock &ifa->lock batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 cb_lock genl_mutex percpu_counters_lock irq_context: 0 cb_lock genl_mutex pcpu_lock stock_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem batched_entropy_u8.lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem kfence_freelist_lock irq_context: 0 rtnl_mutex dev_addr_sem &tbl->lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&w->w) &meta->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&w->w) kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER/1 &n->list_lock &c->lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem 
rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rcu_state.expedited_wq irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &p->pi_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &macsec_netdev_addr_lock_key/1 &n->list_lock &c->lock irq_context: 0 &xa->xa_lock#9 &n->list_lock irq_context: 0 &xa->xa_lock#9 &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: hardirq &fq->mq_flush_lock quarantine_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock 
irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 kn->active#16 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#16 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 &dev->mutex rfkill_global_mutex stock_lock irq_context: 0 &dev->mutex rfkill_global_mutex pcpu_lock stock_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#7 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &hdev->req_lock &hdev->lock &sem->wait_lock irq_context: 0 &hdev->req_lock &hdev->lock &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &p->alloc_lock &x->wait#25 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) rcu_node_0 irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)gid-cache-wq 
(work_completion)(&work->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock uevent_sock_mutex.wait_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 &hdev->req_lock &hdev->lock &p->pi_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uevent_sock_mutex rcu_read_lock rcu_node_0 irq_context: 0 uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 uevent_sock_mutex.wait_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex rcu_read_lock &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &meta->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex rcu_node_0 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rcu_state.expedited_wq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire remove_cache_srcu &n->list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire remove_cache_srcu &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire remove_cache_srcu &obj_hash[i].lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire remove_cache_srcu pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock key irq_context: 0 
sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock percpu_counters_lock irq_context: 0 (work_completion)(&local->sdreq_timeout_work) &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 rtnl_mutex uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 misc_mtx &meta->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle smack_known_lock pcpu_lock stock_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire &n->list_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire &n->list_lock &c->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_sysfs_mtx.wait_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim &rq->__lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &c->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 &dev->clientlist_mutex &lock->wait_lock irq_context: 0 &dev->clientlist_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem kernfs_idr_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 &dev->clientlist_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->master_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->master_mutex rcu_read_lock &rq->__lock irq_context: 0 &dev->master_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex console_lock console_srcu console_owner_lock irq_context: 0 cb_lock genl_mutex console_lock console_srcu 
console_owner irq_context: 0 cb_lock genl_mutex console_lock console_srcu console_owner &port_lock_key irq_context: 0 cb_lock genl_mutex console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex sysctl_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#987 &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#72 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex team->team_lock_key#72 _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#72 _xmit_ETHER &n->list_lock irq_context: 0 rtnl_mutex fib_info_lock pool_lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#72 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#987 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#396 irq_context: 0 &data->open_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#396 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#390 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#388 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#987 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#987 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#386 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#987 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &____s->seqcount#2 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 &____s->seqcount irq_context: 0 kn->active#13 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#380 irq_context: 0 &mm->mmap_lock &vma->vm_lock->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#987 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#987 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#380 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#380 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#378 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#378 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#378 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#972 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#972 irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&work->work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#974 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#112 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#111 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nl_table_lock 
nl_table_wait.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#17 irq_context: 0 nl_table_wait.lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) purge_vmap_area_lock quarantine_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#987 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock rcu_node_0 irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock &rcu_state.expedited_wq irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mdev->req_queue_mutex vicodec_core:1851:(hdl)->_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#17 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#987 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#987 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#988 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex fs_reclaim &rq->__lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex 
fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 &dev->mutex leds_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock &____s->seqcount#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); 
(void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 &mm->mmap_lock rcu_read_lock rcu_read_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC &tn->nametbl_lock &service->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); 
})->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 
&sb->s_type->i_mutex_key#13 rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned 
long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1324 irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 
irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &devlink_port->type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex net_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &tn->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &x->wait#9 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 batched_entropy_u8.lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 kfence_freelist_lock irq_context: 0 sb_writers#9 remove_cache_srcu &rq->__lock irq_context: 0 sb_writers#9 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#7 &rq->__lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle key irq_context: 0 sb_writers#3 sb_internal jbd2_handle pcpu_lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle percpu_counters_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc36_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc35_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc35_nci_rx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc35_nci_rx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc35_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#11 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#608 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#11 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#10 irq_context: 0 tomoyo_ss &obj_hash[i].lock pool_lock irq_context: 0 misc_mtx &dev->mutex fs_reclaim &rq->__lock irq_context: 0 misc_mtx &dev->mutex fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc14_nci_rx_wq#10 &rq->__lock irq_context: 0 &sig->cred_guard_mutex &fs->lock &dentry->d_lock irq_context: 0 (wq_completion)nfc7_nci_tx_wq#18 irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem key irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem pcpu_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem percpu_counters_lock irq_context: 0 (wq_completion)nfc22_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#7 irq_context: 0 &q->queue_lock &n->list_lock irq_context: 0 &q->queue_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#9 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 fs_reclaim irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 &n->list_lock irq_context: 0 remove_cache_srcu rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 remove_cache_srcu rcu_read_lock &cfs_rq->removed.lock irq_context: 0 remove_cache_srcu rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 nl_table_lock irq_context: 0 &type->s_umount_key#47 rcu_node_0 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 net_rwsem irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 &tn->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 &rq->__lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#72 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx &rq->__lock irq_context: 0 nfc_devlist_mutex dev_pm_qos_sysfs_mtx dev_pm_qos_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc31_nci_rx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc31_nci_tx_wq#7 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ul->lock#2 pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ul->lock#2 &dir->lock#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1342 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1342 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1342 &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &rq->__lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh &base->lock irq_context: 0 cb_lock genl_mutex rcu_read_lock rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#28 irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_tx_wq#12 irq_context: 0 &data->open_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 &data->open_mutex rfkill_global_mutex &sem->wait_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc28_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#28 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1345 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1345 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1346 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1346 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1197 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1198 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1171 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1171 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#621 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#621 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#621 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#622 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1350 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#623 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#623 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#224 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#224 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1349 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1349 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1349 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1350 &rq->__lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock batched_entropy_u8.lock irq_context: 0 cb_lock kfence_freelist_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 rcu_node_0 irq_context: softirq &(&net->ipv6.addr_chk_work)->timer rcu_read_lock &pool->lock pool_lock#2 irq_context: softirq &(&net->ipv6.addr_chk_work)->timer rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fs_reclaim &rq->__lock irq_context: 0 &type->s_umount_key#46/1 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#625 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1350 irq_context: 0 sk_lock-AF_PACKET stock_lock irq_context: 0 sk_lock-AF_PACKET pcpu_lock stock_lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)hci0#11 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock krc.lock &obj_hash[i].lock irq_context: 0 
(wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 kn->active#46 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: softirq slock-AF_INET#2 rcu_read_lock rcu_read_lock rcu_read_lock_bh &r->producer_lock#3 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#617 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#226 irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 &dev->master_mutex quarantine_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 ebt_mutex &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#226 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#223 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#53 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex kfence_freelist_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &meta->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex devlinks.xa_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex devlinks.xa_lock pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1353 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1355 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1355 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#630 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1356 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1376 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1375 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1377 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1376 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#646 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1378 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1379 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1379 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 sb_writers#7 &of->mutex 
kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] &n->list_lock irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#234 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#234 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1181 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1182 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#231 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#231 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1381 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1381 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1381 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1381 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1380 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1380 irq_context: 0 rtnl_mutex devnet_rename_sem &cfs_rq->removed.lock irq_context: 0 sb_writers#3 remove_cache_srcu pcpu_lock irq_context: 0 sb_writers#3 remove_cache_srcu percpu_counters_lock irq_context: 0 sb_writers#3 remove_cache_srcu pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#235 irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#235 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#232 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#230 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#648 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#648 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#641 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#637 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#637 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1382 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#631 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#631 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#624 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#624 &rq->__lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) 
&hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1170 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1170 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1170 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1170 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1170 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem pcpu_alloc_mutex.wait_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem &p->pi_lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#490 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#490 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#484 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#482 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1357 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &meta->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1358 irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1049 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#130 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#130 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#127 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#125 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#125 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#125 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1049 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->bla.work)->work) rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#228 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#228 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#634 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#635 irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1361 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1361 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#637 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1382 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1381 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1381 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1381 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1381 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1383 irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1383 &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&barr->work) &x->wait#10 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &meta->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh kfence_freelist_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock &dev_addr_list_lock_key#2/1 rcu_read_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (stats_flush_dwork).work &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#603 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#596 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1321 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1383 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#649 irq_context: 0 &type->i_mutex_dir_key#4 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#649 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1383 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1382 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1382 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1382 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#642 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#642 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#642 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex k-sk_lock-AF_INET &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1382 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 tomoyo_ss &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#638 irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#638 &rq->__lock irq_context: 0 (wq_completion)bond0#83 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1382 &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &rq->__lock irq_context: 0 remove_cache_srcu rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 remove_cache_srcu rcu_read_lock pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#491 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#491 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#485 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex.wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1172 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#493 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#638 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1382 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1124 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1124 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &lock->wait_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1124 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1126 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1126 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1127 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#468 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1128 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1129 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#469 irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock key irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#469 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#142 irq_context: 0 sb_writers tomoyo_ss quarantine_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock 
irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1382 &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1382 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#604 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#236 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#236 irq_context: 0 kn->active#4 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#233 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#510 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#165 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#163 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 key irq_context: 0 rtnl_mutex &ndev->lock &tb->tb6_lock quarantine_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 pcpu_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 percpu_counters_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#31 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#31 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#525 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1219 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#173 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#168 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: hardirq &vkms_out->lock &dev->event_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 tomoyo_ss rcu_read_lock rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)bond0#79 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx uevent_sock_mutex &n->list_lock &c->lock irq_context: hardirq &vkms_out->lock &dev->event_lock 
fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#121 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1237 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 misc_mtx (wq_completion)nfc2_nci_rx_wq#973 irq_context: 0 misc_mtx (wq_completion)nfc2_nci_cmd_wq#973 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#13 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex remove_cache_srcu irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#231 irq_context: 0 rtnl_mutex nf_hook_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex key irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1133 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx gdp_mutex remove_cache_srcu quarantine_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex stock_lock irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex pcpu_lock stock_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1133 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1133 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1384 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1384 &rq->__lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock 
rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1384 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1384 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1383 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1383 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#650 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#650 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#650 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock sb_writers#3 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#650 irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex &rq->__lock irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#643 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#643 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#643 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rcu_read_lock &data->fib_event_queue_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1385 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1385 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1384 irq_context: 0 misc_mtx (wq_completion)nfc5_nci_cmd_wq#59 irq_context: 0 misc_mtx (wq_completion)nfc5_nci_cmd_wq#59 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc5_nci_cmd_wq#59 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1384 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#639 irq_context: 0 cb_lock rcu_read_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#639 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#639 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#237 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#237 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#237 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#237 irq_context: 0 &dev->clientlist_mutex &helper->lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#234 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#234 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#234 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#232 irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#232 &rq->__lock 
irq_context: 0 (wq_completion)nfc4_nci_tx_wq#232 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1386 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1386 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_hook_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1386 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1386 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1385 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1385 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1385 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#651 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#651 &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex sched_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 crngs.lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#651 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#651 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#644 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#640 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#640 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#640 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1387 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1387 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1386 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1386 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1388 irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &ei->socket.wq.wait &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock key irq_context: 0 kn->active#4 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock pcpu_lock irq_context: 0 kn->active#4 rcu_read_lock &obj_hash[i].lock irq_context: 0 kn->active#4 rcu_read_lock pool_lock#2 irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock percpu_counters_lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1388 irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock k-sk_lock-AF_INET6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1387 irq_context: 0 
&ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#652 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#652 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#645 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#645 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#645 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#641 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1387 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#238 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#238 &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 fs_reclaim irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#238 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#238 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#235 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#233 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1389 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1389 &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock &____s->seqcount#7 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock rcu_read_lock &ct->lock irq_context: 0 sk_lock-AF_INET &list->lock#22 irq_context: 0 sk_lock-AF_INET krc.lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_INET krc.lock &base->lock irq_context: 0 sk_lock-AF_INET krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1389 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1389 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1388 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1388 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#653 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#653 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#653 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 fs_reclaim &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 devlinks.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &xa->xa_lock#19 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &xa->xa_lock#19 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &c->lock irq_context: 0 rtnl_mutex rcu_read_lock batched_entropy_u8.lock irq_context: 0 rtnl_mutex rcu_read_lock kfence_freelist_lock irq_context: 0 
(wq_completion)nfc3_nci_cmd_wq#653 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#646 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#646 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#646 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#642 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#642 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#642 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1390 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1390 &rq->__lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1390 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex rcu_state.exp_mutex pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1390 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1389 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1389 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1389 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1389 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#239 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#239 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#236 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#234 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#234 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#234 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1391 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1391 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1390 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1390 &rq->__lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &n->list_lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1390 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1390 &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET &sctp_port_hashtable[i].lock batched_entropy_u8.lock irq_context: 0 sk_lock-AF_INET &sctp_port_hashtable[i].lock kfence_freelist_lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 &sctp_port_hashtable[i].lock &meta->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->sctp.addr_wq_lock slock-AF_INET/1 &sctp_port_hashtable[i].lock kfence_freelist_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1390 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1390 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1390 &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &base->lock irq_context: 0 rtnl_mutex bus_type_sem &rq->__lock irq_context: 0 &dev->filelist_mutex rcu_node_0 irq_context: 0 &dev->filelist_mutex &rcu_state.expedited_wq irq_context: 0 &dev->filelist_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &dev->filelist_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->filelist_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) 
&rdev->wiphy.mtx &rdev->bss_lock &base->lock &obj_hash[i].lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1390 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#654 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#155 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#155 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#654 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#654 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#654 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#647 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#643 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#643 &rq->__lock irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 kernfs_idr_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#643 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1392 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1392 &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1392 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1391 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#387 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#121 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1391 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#605 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#605 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#240 irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->info_timer)->work) irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->disc_work)->work) irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->auto_accept_work)->work) irq_context: 0 &mm->mmap_lock rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->idle_work)->work) irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle mmu_notifier_invalidate_range_start &rq->__lock 
&cfs_rq->removed.lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &x->wait#3 irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &rq->__lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#37 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#37 irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#185 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 fs_reclaim stock_lock irq_context: 0 fs_reclaim key irq_context: 0 fs_reclaim pcpu_lock irq_context: 0 fs_reclaim percpu_counters_lock irq_context: 0 fs_reclaim pcpu_lock stock_lock irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1375 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1252 irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock key irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock pcpu_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock percpu_counters_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_read_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &k->list_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#7 irq_context: 0 
sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu stock_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu key irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu pcpu_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu percpu_counters_lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu pcpu_lock stock_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex pcpu_alloc_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 &data->open_mutex pcpu_alloc_mutex &rq->__lock irq_context: 0 &data->open_mutex pcpu_alloc_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci3#6 irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) irq_context: 0 &data->open_mutex pcpu_alloc_mutex.wait_lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex rcu_state.exp_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) &list->lock#5 irq_context: 0 (wq_completion)hci3#6 
(work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1391 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1363 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &list->lock#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#72 &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 &pipe->mutex/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock pool_lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock &ul->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#72 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rcu_read_lock &tb->tb6_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rcu_read_lock &tb->tb6_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rcu_read_lock &tb->tb6_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 &(&fn_net->fib_chain)->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 stack_depot_init_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex bpf_devs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex bpf_devs_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex bpf_devs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &hdev->req_lock &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex bpf_devs_lock rcu_read_lock rhashtable_bucket irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 
&sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &xa->xa_lock#4 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 &p->lock &of->mutex kn->active#16 &rq->__lock irq_context: 0 &p->lock &of->mutex kn->active#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &devlink_port->type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex net_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &tn->lock irq_context: 0 wq_pool_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#13 remove_cache_srcu &c->lock irq_context: 0 kn->active#13 remove_cache_srcu &n->list_lock irq_context: 0 kn->active#13 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &x->wait#9 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex 
gdp_mutex irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &ep->mtx &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex gdp_mutex lock irq_context: 0 misc_mtx remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex bus_type_sem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex sysfs_symlink_target_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &____s->seqcount irq_context: 0 nfc_devlist_mutex &dev->mutex &rq->__lock irq_context: 0 nfc_devlist_mutex &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex dpm_list_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#72 rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) rcu_state.exp_wake_mutex pool_lock#2 irq_context: 0 &kcov->lock kcov_remote_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &k->k_lock irq_context: 0 &data->open_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex subsys mutex#20 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &dir->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex dev_hotplug_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex dev_base_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex input_pool.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &tbl->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#72 rtnl_mutex sysctl_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex failover_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &idev->mc_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &pnettable->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex smc_ib_devices.mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex &vn->sock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#72 rtnl_mutex lock kernfs_idr_lock &c->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) &rq->__lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock &n->list_lock 
&c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1363 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1363 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1364 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock kfence_freelist_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock &meta->lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1365 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1365 &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1365 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 &xt[i].mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &xt[i].mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1365 irq_context: 0 rtnl_mutex gdp_mutex &cfs_rq->removed.lock irq_context: 0 rtnl_mutex gdp_mutex &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#230 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#230 irq_context: 0 &p->lock remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1368 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#240 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1391 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#237 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#235 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#655 irq_context: 0 &dev->mode_config.idr_mutex &c->lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 
0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#2 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &list->lock#7 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->req_wait_q irq_context: 0 rtnl_mutex pcpu_alloc_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&ht->run_work) &ht->mutex &ht->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#655 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#655 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 &n->list_lock &c->lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) cgroup_threadgroup_rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#655 irq_context: 0 
(wq_completion)nfc4_nci_rx_wq#227 irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#606 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#207 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1325 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1326 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#648 irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#644 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#644 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#644 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 &file->fbs_lock &rq->__lock irq_context: 0 &file->fbs_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) 
&peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#148 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 &sb->s_type->i_mutex_key#10 &rnp->exp_wq[0] irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg1#74 
(work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mode_config.idr_mutex &rq->__lock irq_context: 0 misc_mtx cpu_hotplug_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mode_config.idr_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#600 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1327 irq_context: 0 rtnl_mutex &rnp->exp_wq[1] irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#933 irq_context: 0 rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
&sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#933 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#958 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1393 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1305 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1305 irq_context: 0 (wq_completion)writeback (work_completion)(&(&wb->dwork)->work) &type->s_umount_key#40 lock#4 &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#592 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#585 irq_context: 0 rtnl_mutex &wg->device_update_lock rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &tbl->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &n->lock irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &lruvec->lru_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu &rq->__lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex remove_cache_srcu quarantine_lock irq_context: softirq &(&nsim_dev->trap_data->trap_report_dw)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1327 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1328 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#609 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem 
rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &mm->mmap_lock rcu_read_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem kernfs_idr_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#211 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#211 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#211 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#611 irq_context: 0 (wq_completion)wg-kex-wg2#146 irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1393 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1393 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1393 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1392 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1392 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1392 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1392 irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1392 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1392 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) rcu_node_0 irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock 
irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#656 irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#241 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#659 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#659 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#387 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#387 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#381 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#381 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#118 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1054 irq_context: 0 &hdev->req_lock (wq_completion)hci0#10 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#117 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#988 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#988 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#988 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#988 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#978 irq_context: 0 sb_writers#5 &sb->s_type->i_lock_key bit_wait_table + i irq_context: 0 &sb->s_type->i_mutex_key#10 &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#979 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#988 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#979 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#979 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#988 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#988 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#989 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#397 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#979 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#397 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#979 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem remove_cache_srcu &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#391 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#979 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#391 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#391 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#389 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#385 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#122 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#980 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#122 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#119 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock rcu_node_0 
irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#118 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#989 irq_context: 0 sk_lock-AF_PACKET rcu_state.exp_mutex &rnp->exp_wq[0] irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#989 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#989 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#989 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#989 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#990 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#990 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#398 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#398 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#398 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#398 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#392 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#390 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#390 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#390 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#990 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#990 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#990 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#991 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#399 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#399 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#393 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#391 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#991 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#377 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#971 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#971 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#971 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#971 irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq &(&ipvs->defense_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#971 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#973 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#381 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#379 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#974 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#974 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#974 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#974 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#975 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#382 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#380 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#380 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#380 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#116 irq_context: 0 
&ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#116 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#116 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#975 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#975 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#975 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#116 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#113 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#976 irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#389 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#389 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#383 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#381 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#381 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#976 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#390 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#390 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#384 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#384 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#384 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#382 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#977 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#977 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#977 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#991 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#977 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#400 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#400 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#400 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#991 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#992 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#400 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#394 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#392 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#992 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) &base->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#992 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#992 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#993 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#401 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#401 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#395 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#395 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#395 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#393 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#402 irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#402 irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim &obj_hash[i].lock irq_context: 0 &type->i_mutex_dir_key#4 fs_reclaim pool_lock#2 irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#396 irq_context: 0 cb_lock genl_mutex pcpu_alloc_mutex &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rfkill_global_mutex uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#993 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#993 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#977 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#394 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#993 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#994 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#123 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#123 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#123 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#123 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#120 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#119 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#18 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &pool->lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#18 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#994 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#994 &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &base->lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock _xmit_ETHER &local->filter_lock &n->list_lock &c->lock irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#994 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#994 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock &n->list_lock &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#994 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#995 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#124 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#124 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#124 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &sem->wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#124 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#121 irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex pcpu_lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex percpu_counters_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex pcpu_lock stock_lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &cfs_rq->removed.lock irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem css_set_lock cgroup_file_kn_lock kernfs_notify_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &dev->mode_config.mutex crtc_ww_class_acquire crtc_ww_class_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events free_ipc_work &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#656 irq_context: 0 &mm->mmap_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#649 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#645 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#659 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx remove_cache_srcu pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#639 irq_context: 0 
(wq_completion)wg-crypt-wg1#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1407 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss &obj_hash[i].lock pool_lock irq_context: 0 &pipe->mutex/1 &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#673 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#673 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1413 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1414 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#667 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#667 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock key irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock pcpu_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock percpu_counters_lock irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &bridge_netdev_addr_lock_key &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1419 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#13 irq_context: 0 &dev->mutex device_links_lock &rq->__lock irq_context: 0 &dev->mutex device_links_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex (pm_chain_head).rwsem &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc14_nci_tx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#681 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#681 irq_context: 0 sk_lock-AF_INET rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 sk_lock-AF_INET rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 sk_lock-AF_INET rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 &ndev->req_lock (wq_completion)nfc40_nci_cmd_wq#3 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc40_nci_cmd_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#252 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#252 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#254 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#254 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#231 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1441 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1447 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1447 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1458 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#707 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#707 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &meta->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 jbd2_handle rcu_node_0 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock 
kfence_freelist_lock irq_context: 0 &data->open_mutex remove_cache_srcu &rq->__lock irq_context: 0 &data->open_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#275 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#275 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex gdp_mutex irq_context: 0 (wq_completion)nfc32_nci_tx_wq#11 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 misc_mtx remove_cache_srcu rcu_node_0 irq_context: 0 misc_mtx remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 misc_mtx &dev->mutex rcu_node_0 irq_context: 0 misc_mtx &dev->mutex &rcu_state.expedited_wq irq_context: 0 misc_mtx &dev->mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx &dev->mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx &dev->mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem rcu_node_0 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rcu_state.expedited_wq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: softirq (&p->timer) &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 nfc_devlist_mutex key irq_context: 0 nfc_devlist_mutex pcpu_lock irq_context: 0 nfc_devlist_mutex percpu_counters_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#37 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex gdp_mutex &c->lock irq_context: 0 (wq_completion)hci1#3 irq_context: 0 rtnl_mutex wq_pool_mutex &wq->mutex &rq->__lock irq_context: 0 rtnl_mutex wq_pool_mutex &wq->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex wq_pool_mutex.wait_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex wq_pool_mutex.wait_lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1592 irq_context: softirq (&ndev->rs_timer) rcu_read_lock rcu_read_lock rcu_read_lock_bh pcpu_lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1600 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#93 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#330 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#330 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#331 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#326 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#134 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1090 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#334 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#883 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#884 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#884 irq_context: 0 (wq_completion)wg-kex-wg0#159 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#886 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#887 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#887 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#887 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock batched_entropy_u8.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#889 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#889 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#334 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#334 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#895 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback rcu_read_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback rcu_read_lock rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#895 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#899 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &lock->wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#901 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#901 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#903 irq_context: 0 &data->open_mutex rcu_node_0 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#925 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#959 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#927 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#926 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#582 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#926 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#197 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#926 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#926 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#926 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#930 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#933 &rq->__lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock genl_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#111 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#379 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#379 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#111 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#111 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 
0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#961 irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 &mm->mmap_lock remove_cache_srcu &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#961 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#961 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#6 rcu_node_0 irq_context: 0 sb_writers#6 &rcu_state.expedited_wq irq_context: 0 sb_writers#6 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#6 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#6 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#962 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#962 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#962 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#962 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#963 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#963 &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 rcu_read_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#963 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#963 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#197 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#197 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1306 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1307 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1307 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1307 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1307 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#593 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock &obj_hash[i].lock irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &bgl->locks[i].lock pool_lock#2 irq_context: 0 sb_internal jbd2_handle &ei->i_data_sem &mapping->i_private_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#593 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#203 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#203 irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#586 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1308 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#200 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#200 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1308 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#200 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1308 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#583 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#200 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#200 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#198 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#121 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#121 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#120 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#995 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#995 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#995 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex fill_pool_map-wait-type-override &rq->__lock irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock tk_core.seq.seqcount irq_context: 0 sk_lock-AF_INET rcu_read_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &list->lock#12 irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &meta->lock irq_context: 0 &dev->clientlist_mutex pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#995 irq_context: 0 sk_lock-AF_INET krc.lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sk_lock-AF_INET krc.lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#995 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#996 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#996 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#403 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#403 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#403 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#403 irq_context: 0 &data->open_mutex rfkill_global_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#397 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#160 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#160 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#160 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#996 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#997 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#998 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#998 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#999 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#999 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#999 irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#399 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock irq_context: 0 kn->active#15 &n->list_lock irq_context: 0 kn->active#15 &n->list_lock &c->lock irq_context: 0 kn->active#15 &rq->__lock irq_context: 0 kn->active#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1000 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1001 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1001 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1001 irq_context: 0 misc_mtx &dev->mutex remove_cache_srcu &rq->__lock irq_context: 0 misc_mtx &dev->mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex gdp_mutex &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1002 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1003 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1003 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1003 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#123 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#122 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1005 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1005 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1006 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1006 &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1007 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1007 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1007 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1008 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#409 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#409 irq_context: 0 &sig->cred_guard_mutex 
sb_writers#3 mmu_notifier_invalidate_range_start &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 mmu_notifier_invalidate_range_start &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1010 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1010 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1011 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1012 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1013 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1013 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1013 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1013 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1015 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock crngs.lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#403 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1018 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1018 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1019 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1019 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss rcu_read_lock pool_lock#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 fill_pool_map-wait-type-override &c->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &sig->cred_guard_mutex sb_writers#3 fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 fs_reclaim &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13/4 fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq &(&forw_packet_aggr->delayed_work)->timer rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex fs_reclaim &rq->__lock irq_context: 
0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1019 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1019 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1020 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1020 irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#231 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#640 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1367 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1394 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1394 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1394 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1394 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1393 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1393 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1393 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex (inetaddr_chain).rwsem &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock &obj_hash[i].lock irq_context: 0 sk_lock-AF_NETLINK rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg0#132 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1393 irq_context: 0 cb_lock genl_mutex bus_type_sem &rq->__lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle &journal->j_state_lock &journal->j_wait_commit &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#72 &nsim_trap_data->trap_lock &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1050 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1050 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1050 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1050 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1050 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1050 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1051 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1051 &rq->__lock irq_context: hardirq &fq->mq_flush_lock bit_wait_table + i &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1051 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1052 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1052 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1052 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 tomoyo_ss remove_cache_srcu irq_context: 0 sb_writers#4 tomoyo_ss remove_cache_srcu quarantine_lock irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] rcu_node_0 irq_context: 0 kn->active#46 &kernfs_locks->open_file_mutex[count] &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1052 irq_context: 0 rcu_read_lock_bh _xmit_ETHER#2 irq_context: 0 rcu_read_lock_bh _xmit_ETHER#2 &obj_hash[i].lock irq_context: 0 rcu_read_lock_bh _xmit_ETHER#2 pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1052 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1053 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#425 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#425 irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &n->list_lock irq_context: 0 sk_lock-AF_BLUETOOTH-BTPROTO_HCI &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#419 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#417 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1053 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1053 irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1053 irq_context: 0 cb_lock genl_mutex bus_type_sem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex 
&n->list_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1054 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1055 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#426 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &fsnotify_mark_srcu pool_lock#2 irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#426 irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#420 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#418 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1055 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1055 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1055 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1055 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1055 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1056 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss mount_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3 tomoyo_ss mount_lock rcu_read_lock rename_lock.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1056 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &c->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &obj_hash[i].lock irq_context: 0 tty_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events fqdir_free_work rcu_state.barrier_mutex rcu_state.barrier_mutex.wait_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1056 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1056 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1057 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1057 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1057 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4#5 irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#7 irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->id_addr_timer)->work) irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &x->wait#3 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) 
&hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->info_timer)->work) irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->disc_work)->work) irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->auto_accept_work)->work) irq_context: 0 &dev->master_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->idle_work)->work) irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &x->wait#3 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &rq->__lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci4#6 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#657 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) &list->lock#5 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &list->lock#7 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) lock#6 irq_context: 
0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock kernfs_idr_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#657 irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock tk_core.seq.seqcount irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock hrtimer_bases.lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock 
tk_core.seq.seqcount irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock &(&vblank->seqlock)->lock &____s->seqcount#5 irq_context: softirq net/wireless/reg.c:236 irq_context: softirq net/wireless/reg.c:236 rcu_read_lock &pool->lock irq_context: softirq net/wireless/reg.c:236 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: softirq net/wireless/reg.c:236 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)events_power_efficient (reg_check_chans).work rtnl_mutex irq_context: 0 (wq_completion)events_power_efficient (reg_check_chans).work rtnl_mutex &rdev->wiphy.mtx irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1368 &rq->__lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock deferred_probe_mutex irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock device_links_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 namespace_sem &n->list_lock irq_context: 0 namespace_sem &n->list_lock &c->lock irq_context: 0 rcu_state.barrier_mutex.wait_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER &n->list_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock _xmit_ETHER &n->list_lock &c->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#9 &of->mutex kn->active#51 &rq->__lock irq_context: 0 sb_writers#9 &of->mutex kn->active#51 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock 
irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci1 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx &rdev->bss_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#650 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#650 &rq->__lock irq_context: 0 sb_writers#9 remove_cache_srcu irq_context: 0 sb_writers#9 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#9 remove_cache_srcu &c->lock irq_context: 0 sb_writers#9 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#9 remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &wg->device_update_lock rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#650 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx rcu_read_lock rcu_read_lock &sta->rate_ctrl_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#646 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#241 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#241 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#241 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#241 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#238 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1611 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1611 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#238 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#238 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1368 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1377 irq_context: 0 &sig->cred_guard_mutex stock_lock irq_context: 0 &sig->cred_guard_mutex pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1377 &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock pcpu_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx rcu_read_lock percpu_counters_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1377 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1376 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1376 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1376 irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) 
&peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#150 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#147 irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long 
__ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.barrier_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh 
&peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1378 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1378 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1378 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1377 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex quarantine_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1377 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1379 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1379 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1379 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1378 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)cfg80211 (work_completion)(&rdev->event_work) &rdev->wiphy.mtx &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#24 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1178 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1178 &rq->__lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim &cfs_rq->removed.lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim &obj_hash[i].lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] fs_reclaim pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1182 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#158 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#158 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#501 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#501 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#502 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1190 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1190 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#498 irq_context: 0 &sb->s_type->i_mutex_key#10 fill_pool_map-wait-type-override &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1193 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#29 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)bond0#79 irq_context: 0 (wq_completion)bond0#79 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg1#156 irq_context: 0 rtnl_mutex &ndev->lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem rcu_node_0 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock &anon_vma->rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#29 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex dev_addr_sem team->team_lock_key#83 irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci2#9 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &list->lock#7 irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 lweventlist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 lweventlist_lock &dir->lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1205 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#529 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#172 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#928 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1230 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#544 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1240 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#185 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#977 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#117 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#117 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &base->lock &obj_hash[i].lock irq_context: 0 &dev_instance->mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock kernfs_idr_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 irq_context: 0 
(wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &x->wait#9 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &pipe->mutex/1 &mm->mmap_lock rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock deferred_probe_mutex irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock device_links_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &c->lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex &mm->mmap_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex &tsk->futex_exit_mutex &mm->mmap_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq &(&bat_priv->orig_work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &ul->lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx gdp_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 &mm->mmap_lock remove_cache_srcu key irq_context: 0 vlan_ioctl_mutex rtnl_mutex rcu_read_lock &rq->__lock irq_context: 0 vlan_ioctl_mutex rtnl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx remove_cache_srcu pool_lock#2 irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 fill_pool_map-wait-type-override &c->lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 fill_pool_map-wait-type-override &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 fill_pool_map-wait-type-override pool_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 &____s->seqcount#2 irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET6 &pcp->lock &zone->lock irq_context: 0 &sb->s_type->i_mutex_key#3 kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 kn->active#46 &n->list_lock irq_context: 0 kn->active#46 &n->list_lock &c->lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci2#8 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &rq->__lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 
sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex rcu_read_lock &net->sctp.local_addr_lock &net->sctp.addr_wq_lock &____s->seqcount irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&cache_cleaner)->work) &rq->__lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &n->list_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 
0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &c->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 &wq->mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) 
&hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#65 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#641 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex dpm_list_mtx &rq->__lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#611 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#611 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &wq->mutex &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1331 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1331 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#213 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#213 irq_context: 0 &wq->mutex pool_lock#2 irq_context: 0 misc_mtx rfkill_global_mutex &data->mtx &rq->__lock irq_context: 0 misc_mtx rfkill_global_mutex &data->mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1332 irq_context: 0 pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex nf_hook_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long 
__ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1332 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1332 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1332 &rq->__lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#7 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 cb_lock genl_mutex uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 
pernet_ops_rwsem k-sk_lock-AF_INET6 &rq->__lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_INET6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex pcpu_lock irq_context: 0 pernet_ops_rwsem k-sk_lock-AF_RXRPC &rxnet->local_mutex percpu_counters_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &c->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->tx_work) &rq->__lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->tx_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->tx_work) &list->lock#5 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci3#6 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)hci3#6 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 &dev->master_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#397 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#395 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#996 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#996 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#996 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#146 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1378 irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#147 
(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock stock_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#996 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#997 irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#125 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#125 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#125 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#588 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#588 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#199 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#199 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#199 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#199 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#588 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#581 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#581 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#581 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#196 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#578 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#578 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#578 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex uevent_sock_mutex.wait_lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#194 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#200 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1300 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1300 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#197 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#195 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#195 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#195 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1301 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#45 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#45 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#41 irq_context: 0 
&ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#589 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#589 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#41 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#582 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#579 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu rcu_node_0 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#125 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu irq_context: 0 cb_lock genl_mutex remove_cache_srcu quarantine_lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &c->lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex remove_cache_srcu &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#404 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#404 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1301 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1301 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex net_rwsem nl_table_wait.lock irq_context: 0 cb_lock rtnl_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &mm->mmap_lock &anon_vma->rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &c->lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock pool_lock#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET &rq->__lock cpu_asid_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock pcpu_lock stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1304 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1304 irq_context: 0 sb_writers#8 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem &n->list_lock &c->lock irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#1304 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1304 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1305 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1308 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1308 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1309 irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh 
rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#46 irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx krc.lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 rcu_read_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &simple_offset_xa_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#42 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#42 irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) sched_map-wait-type-override pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#594 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#594 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#594 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#594 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#587 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#584 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#204 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#204 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#204 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#201 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#199 irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 cb_lock genl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock &dentry->d_lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#199 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex tomoyo_ss &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim &cfs_rq->removed.lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 fs_reclaim pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1332 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#214 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1370 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#642 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#642 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1371 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1371 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex krc.lock &obj_hash[i].lock irq_context: 0 &dev->mutex &rcu_state.expedited_wq irq_context: 0 &dev->mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &dev->mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex krc.lock &base->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1372 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1373 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#236 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#236 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#236 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#658 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1395 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rdev->bss_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#613 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#614 irq_context: 0 (wq_completion)wg-kex-wg1#153 irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 &p->lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem pool_lock#2 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#608 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#658 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#658 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1395 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1395 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#658 irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#60 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#60 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#651 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#55 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#647 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#55 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1395 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1394 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1394 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1396 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1396 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1395 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1395 irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &base->lock &obj_hash[i].lock irq_context: 0 
(wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg1#78 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg0#79 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#659 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#652 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#648 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#648 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#648 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1397 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1397 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1396 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1396 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1398 irq_context: 0 &dev->mutex uevent_sock_mutex batched_entropy_u8.lock irq_context: 0 &dev->mutex uevent_sock_mutex kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1398 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1398 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1398 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#647 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1397 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1397 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1397 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1397 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#660 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#660 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#653 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1399 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1398 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#650 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1400 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1400 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1400 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1402 irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 kn->active#47 &kernfs_locks->open_file_mutex[count] fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#47 &lock->wait_lock irq_context: 0 kn->active#47 &p->pi_lock irq_context: 0 kn->active#47 &p->pi_lock &rq->__lock irq_context: 0 kn->active#47 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#243 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#244 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#244 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1405 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#1406 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#245 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#245 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#245 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#61 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1406 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#15 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#10 irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#154 (work_completion)(&peer->transmit_handshake_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 crtc_ww_class_mutex &mm->mmap_lock rcu_read_lock rcu_node_0 irq_context: 0 crtc_ww_class_mutex &mm->mmap_lock rcu_read_lock &rq->__lock irq_context: 0 crtc_ww_class_mutex &mm->mmap_lock rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex pool_lock#2 irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &obj_hash[i].lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &dev->mode_config.connector_list_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter fs_reclaim irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter pool_lock#2 irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex 
&c->lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &crtc->commit_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex reservation_ww_class_mutex irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &x->wait#15 irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex tk_core.seq.seqcount irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vbl_lock &dev->vblank_time_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &vkms_out->lock &dev->event_lock &dev->vblank_time_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &base->lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#155 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1408 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1408 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1408 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1409 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#671 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1309 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#47 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#671 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#664 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#660 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1409 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1411 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#676 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#1418 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1418 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#248 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1419 irq_context: 0 &dev->master_mutex uevent_sock_mutex &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#678 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1421 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#679 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#680 irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &rq->__lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &n->list_lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1423 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1423 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1424 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1424 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1425 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1425 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1425 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1425 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1424 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#13 irq_context: 0 &dev->mutex rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 misc_mtx (wq_completion)nfc13_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1426 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#19 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#8 irq_context: 0 misc_mtx (wq_completion)nfc21_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#10 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &hsr->seqnr_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc22_nci_tx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#674 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#674 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#24 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#16 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc13_nci_rx_wq#20 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#12 irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#14 
irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc50_nci_tx_wq irq_context: 0 (wq_completion)nfc35_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1429 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#249 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#684 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1430 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#254 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#254 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#32 irq_context: 0 pernet_ops_rwsem stock_lock irq_context: 0 pernet_ops_rwsem key irq_context: 0 pernet_ops_rwsem pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1432 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1433 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1433 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1433 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock kfence_freelist_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &nsim_trap_data->trap_lock &meta->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1436 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1435 irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &p->pi_lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1435 &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 br_ioctl_mutex rtnl_mutex fs_reclaim irq_context: 0 br_ioctl_mutex rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 br_ioctl_mutex rtnl_mutex &c->lock irq_context: 0 br_ioctl_mutex rtnl_mutex stack_depot_init_mutex irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 br_ioctl_mutex rtnl_mutex crngs.lock irq_context: 0 rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key &n->list_lock irq_context: 0 rtnl_mutex &idev->mc_lock &batadv_netdev_addr_lock_key &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#253 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#689 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#689 &rq->__lock irq_context: 0 br_ioctl_mutex rtnl_mutex krc.lock irq_context: 0 br_ioctl_mutex rtnl_mutex &dir->lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#689 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#689 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#682 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1439 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1439 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#691 irq_context: 0 
(wq_completion)nfc3_nci_cmd_wq#691 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1440 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex nf_hook_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1441 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#694 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1442 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1444 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#259 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#259 &rq->__lock irq_context: 0 misc_mtx pcpu_alloc_mutex rcu_node_0 irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &ul->lock#2 &c->lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#259 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1444 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#698 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#70 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1448 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1448 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1449 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1449 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1448 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1448 &rq->__lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 &mm->mmap_lock &mapping->i_mmap_rwsem fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1448 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#263 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1450 irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#701 irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#701 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 
0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#694 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#695 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem rcu_read_lock pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#703 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1453 irq_context: 0 &data->open_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &data->open_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock pool_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1467 irq_context: 0 &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#716 irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock irq_context: 0 misc_mtx nl_table_wait.lock &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#716 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#716 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#716 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1471 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#16 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex gdp_mutex lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#12 irq_context: 0 sb_writers#3 
&type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#12 &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1090 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1091 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)netns net_cleanup_work &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work &rq->__lock irq_context: 0 cb_lock genl_mutex batched_entropy_u8.lock irq_context: 0 cb_lock genl_mutex kfence_freelist_lock irq_context: 0 cb_lock genl_mutex &meta->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex bus_type_sem irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)bond0#76 irq_context: 0 (wq_completion)bond0#76 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#76 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#76 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 
(wq_completion)bond0#76 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 br_ioctl_mutex br_ioctl_mutex.wait_lock irq_context: 0 br_ioctl_mutex &rq->__lock irq_context: 0 br_ioctl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 br_ioctl_mutex rcu_read_lock &rq->__lock irq_context: 0 br_ioctl_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1487 irq_context: 0 &dev->clientlist_mutex &helper->lock &dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex drm_connector_list_iter rcu_node_0 irq_context: 0 br_ioctl_mutex.wait_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &rq->__lock cpu_asid_lock irq_context: 0 br_ioctl_mutex &cfs_rq->removed.lock irq_context: 0 br_ioctl_mutex &obj_hash[i].lock irq_context: 0 br_ioctl_mutex pool_lock#2 irq_context: 0 br_ioctl_mutex rtnl_mutex &n->list_lock irq_context: 0 br_ioctl_mutex rtnl_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex sysfs_symlink_target_lock irq_context: 0 br_ioctl_mutex rtnl_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex lock kernfs_idr_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex dpm_list_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex uevent_sock_mutex irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1493 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1493 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1494 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1544 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1489 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1545 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#736 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#728 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1504 irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rcu_read_lock 
rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &cfs_rq->removed.lock irq_context: 0 crtc_ww_class_acquire crtc_ww_class_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#37 irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc49_nci_cmd_wq#2 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#10 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)hci0#10 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 &mm->mmap_lock remove_cache_srcu pcpu_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu percpu_counters_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc45_nci_tx_wq#2 irq_context: 0 (wq_completion)nfc38_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#767 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1547 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#774 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1551 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#781 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#781 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1571 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1572 irq_context: 0 &dev->mutex subsys mutex#40 &rq->__lock irq_context: 0 &dev->mutex subsys mutex#40 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1572 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rcu_read_lock &tb->tb6_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1585 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1588 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1611 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1594 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1597 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1597 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#691 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1453 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1451 irq_context: 0 sk_lock-AF_PHONET port_mutex#2 rcu_read_lock rcu_node_0 irq_context: 0 sk_lock-AF_PHONET port_mutex#2 rcu_read_lock 
&rq->__lock irq_context: 0 sk_lock-AF_PHONET port_mutex#2 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 sb_internal jbd2_handle &sbi->s_orphan_lock rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 sb_internal rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1597 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1597 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1598 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1598 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#78 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1598 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1598 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1611 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1609 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1609 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1609 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#790 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#790 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1600 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1600 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1600 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#90 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#790 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#790 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1614 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1616 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#267 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#879 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#879 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#267 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#89 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &p->pi_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#880 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#880 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#880 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#325 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#324 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#325 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#326 &rq->__lock irq_context: 0 rtnl_mutex (inetaddr_chain).rwsem rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock crngs.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock &nsim_dev->fa_cookie_lock irq_context: 0 (wq_completion)events 
(work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#438 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg2#79 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#90 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg1#77 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg1#153 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg1#154 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg0#78 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#336 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)bond0#79 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#884 irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#886 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#886 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#886 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#886 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#887 irq_context: 0 nfc_devlist_mutex deferred_probe_mutex &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#888 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#890 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#890 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#95 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#889 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#890 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#339 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#339 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#334 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#891 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#891 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#892 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#892 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#893 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#894 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#896 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#896 irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&peer->transmit_packet_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#897 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#897 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#343 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#901 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#901 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#901 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#928 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work rcu_state.barrier_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)netns 
net_cleanup_work rcu_state.barrier_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#900 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#901 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#903 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#902 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#902 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &ei->i_es_lock key#7 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 rcu_node_0 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rcu_state.expedited_wq irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#902 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &cfs_rq->removed.lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 &obj_hash[i].lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 &sb->s_type->i_mutex_key#13 pool_lock#2 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#341 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#346 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 remove_cache_srcu &____s->seqcount irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#346 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rcu_read_lock &tb->tb6_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &(&fn_net->fib_chain)->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 stack_depot_init_mutex irq_context: 0 (wq_completion)events_power_efficient (work_completion)(&(&tbl->gc_work)->work) &tbl->lock batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#161 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#161 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#161 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#161 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#158 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex bpf_devs_lock irq_context: 0 sb_writers#7 &of->mutex 
kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#157 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 fs_reclaim irq_context: 0 (wq_completion)nfc4_nci_tx_wq#155 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1194 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy162 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1194 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1195 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#508 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#508 &rq->__lock irq_context: 0 misc_mtx &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 &net->ipv4.ra_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#508 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)afs (work_completion)(&net->cells_manager) &rq->__lock irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex quarantine_lock irq_context: softirq &(&bat_priv->mcast.work)->timer rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#508 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#502 irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#502 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#502 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#500 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 devlinks.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &c->lock irq_context: 0 misc_mtx &dev->mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)afs (work_completion)(&net->fs_manager) &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &xa->xa_lock#19 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &xa->xa_lock#19 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#83 &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock lock kernfs_idr_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex lock kernfs_idr_lock pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#83 rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->nc.work)->work) fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy162 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1195 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1195 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1195 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1196 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex bpf_devs_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex bpf_devs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex bpf_devs_lock rcu_read_lock rhashtable_bucket irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock &rnp->exp_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.exp_mutex rcu_state.exp_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &rq->__lock cpu_asid_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#2 irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount irq_context: 0 
(wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &xa->xa_lock#4 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &devlink_port->type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex net_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &tn->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &x->wait#9 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex gdp_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex gdp_mutex lock irq_context: 0 
sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex gdp_mutex lock kernfs_idr_lock &c->lock irq_context: 0 rtnl_mutex devnet_rename_sem rcu_node_0 irq_context: 0 rtnl_mutex devnet_rename_sem &rcu_state.expedited_wq irq_context: 0 rtnl_mutex devnet_rename_sem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 rtnl_mutex devnet_rename_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rcu_state.expedited_wq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock rcu_state.barrier_mutex rcu_state.barrier_mutex.wait_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &root->kernfs_rwsem irq_context: 0 (wq_completion)events_unbound (work_completion)(&port->bc_work) quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex bus_type_sem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex sysfs_symlink_target_lock irq_context: 0 pernet_ops_rwsem nl_table_lock nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#83 rtnl_mutex dpm_list_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 &kcov->lock kcov_remote_lock &n->list_lock irq_context: 0 &kcov->lock kcov_remote_lock &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem &n->list_lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem &n->list_lock &c->lock irq_context: 0 &sb->s_type->i_mutex_key#17 namespace_sem &rq->__lock irq_context: 0 bt_proto_lock &____s->seqcount#2 irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex &rq->__lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex rfkill_global_mutex uevent_sock_mutex.wait_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &p->pi_lock irq_context: 0 &data->open_mutex rfkill_global_mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex rfkill_global_mutex &p->pi_lock &rq->__lock irq_context: 0 &data->open_mutex rfkill_global_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex subsys mutex#20 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &dir->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex dev_hotplug_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 
(wq_completion)nfc4_nci_tx_wq#156 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex dev_base_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex input_pool.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &tbl->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex sysctl_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex failover_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex proc_subdir_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &idev->mc_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock 
&dev->mutex &devlink->lock_key#83 rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &pnettable->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex smc_ib_devices.mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &vn->sock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex lock kernfs_idr_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#928 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 sb_writers#3 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#927 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#927 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#927 irq_context: 0 pernet_ops_rwsem rtnl_mutex dpm_list_mtx &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#929 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#355 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#355 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#350 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#350 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#350 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#348 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#348 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#348 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#928 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#928 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#928 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#930 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#929 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#929 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#929 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#931 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#931 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#931 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#47 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#47 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1309 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1309 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1310 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#47 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#43 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#380 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#380 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#965 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#966 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#595 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#382 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#383 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#967 irq_context: 0 crtc_ww_class_acquire &p->pi_lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#968 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#968 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#969 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#595 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#583 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#583 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#583 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1293 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#577 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1293 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#574 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#574 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#574 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1294 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#198 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#198 &rq->__lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock key irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock pcpu_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock percpu_counters_lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 smack_known_lock pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#198 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#198 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#195 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#193 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#584 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#584 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#584 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#578 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#575 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1295 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1295 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#585 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#585 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#585 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#585 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1297 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1297 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 stock_lock rcu_read_lock per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1297 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#1297 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1298 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1298 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1298 irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1298 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1299 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1302 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#590 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#590 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#583 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#583 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#583 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#580 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1302 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1302 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1302 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1303 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#201 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#201 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#198 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#196 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1303 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1303 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1303 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#591 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#591 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#591 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#591 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#584 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#581 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1303 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#579 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#576 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#576 &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#576 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc2_nci_tx_wq#1296 irq_context: 0 misc_mtx (wq_completion)nfc2_nci_tx_wq#1296 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1303 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1304 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#595 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#43 irq_context: 0 misc_mtx (wq_completion)nfc2_nci_tx_wq#1296 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#595 irq_context: 0 misc_mtx (wq_completion)nfc2_nci_rx_wq#1295 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1296 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1296 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#585 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1296 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#205 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1296 
irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#205 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1296 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#205 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1296 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1296 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#205 irq_context: 0 nfc_devlist_mutex kn->active#4 rcu_read_lock &rq->__lock irq_context: 0 nfc_devlist_mutex kn->active#4 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1297 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#586 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#587 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#580 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#580 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#580 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#577 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#202 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#200 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1310 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1310 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1310 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1311 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#596 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#596 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#596 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#589 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#586 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1311 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1311 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1311 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &rcu_state.expedited_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1311 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1311 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1311 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1311 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#206 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#206 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#24 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#24 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#24 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#24 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#24 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_tx_wq#24 &rq->__lock irq_context: 0 
(wq_completion)nfc6_nci_tx_wq#24 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#206 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#203 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#597 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#203 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#597 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#203 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#590 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#590 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#590 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#587 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#201 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#201 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#201 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1312 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#930 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1312 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#930 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1312 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1312 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1312 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1313 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1313 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#48 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#48 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#44 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#44 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1313 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1313 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1314 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#598 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#598 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#598 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#598 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#588 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#588 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#398 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#398 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#398 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#396 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#122 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#121 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#121 &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#121 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#19 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#997 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#997 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#998 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#998 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#998 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#998 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#998 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#999 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1000 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#405 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#83 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#405 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#397 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1000 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1000 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1001 irq_context: 0 (wq_completion)events (work_completion)(&(&krcp->monitor_work)->work) rcu_callback &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#407 irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1006 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#123 irq_context: 0 &sig->cred_guard_mutex sb_writers#3 mmu_notifier_invalidate_range_start pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1010 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1014 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1014 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1015 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#930 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1015 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1016 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#268 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#268 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#930 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#932 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#931 irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1016 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1016 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#10 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#934 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1017 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1018 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1020 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1020 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1021 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1021 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1022 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#935 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#936 
irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#937 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)events fqdir_free_work quarantine_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#361 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1022 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1022 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1022 &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1022 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#368 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#368 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#947 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1022 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1022 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1022 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1022 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1023 irq_context: 0 tomoyo_ss remove_cache_srcu &meta->lock irq_context: 0 tomoyo_ss remove_cache_srcu kfence_freelist_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1023 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1023 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1023 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1023 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1024 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1024 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1025 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1025 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu stock_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu pcpu_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1026 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1026 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1026 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1026 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1026 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1026 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1028 irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1029 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1029 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1029 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1030 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#412 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1030 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#1030 irq_context: 0 sb_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1031 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1031 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1031 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1032 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rq->__lock &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock &meta->lock irq_context: 0 &hdev->req_lock &hdev->lock hci_cb_list_lock kfence_freelist_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1252 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1252 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#549 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1252 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#547 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1252 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1252 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1252 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1253 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1253 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1253 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1253 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1253 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1253 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1254 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#556 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#556 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#556 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#556 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#556 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#556 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1254 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1254 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#550 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1254 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1255 irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1255 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#548 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1255 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1255 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1255 irq_context: 0 &kernfs_locks->open_file_mutex[count] &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1256 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1256 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1257 irq_context: 0 
&ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#557 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#557 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#557 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#551 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#551 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#551 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#549 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#189 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#189 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#186 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#184 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1257 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1257 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1257 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1257 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1257 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1258 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) 
rcu_read_lock_bh &base->lock irq_context: 0 &mm->mmap_lock &anon_vma->rwsem mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1258 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1258 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1258 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1258 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1258 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1258 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1258 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc3_nci_tx_wq#550 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_tx_wq#550 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_tx_wq#550 &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_tx_wq#550 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#552 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#558 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#558 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#558 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1259 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1259 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1260 irq_context: 0 &mm->mmap_lock &anon_vma->rwsem per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1260 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1260 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#553 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#551 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1260 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1260 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1260 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1260 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1260 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1261 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#190 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#930 &rq->__lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &rq->__lock irq_context: 0 rcu_state.exp_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#930 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#190 irq_context: 0 &hdev->req_lock &hdev->lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#187 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#185 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1261 &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock &root->kernfs_rwsem kernfs_idr_lock pool_lock#2 irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock &n->list_lock irq_context: 0 rtnl_mutex rcu_read_lock &tb->tb6_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER krc.lock &obj_hash[i].lock irq_context: 0 rtnl_mutex &idev->mc_lock 
_xmit_ETHER krc.lock &base->lock irq_context: 0 rtnl_mutex &idev->mc_lock _xmit_ETHER krc.lock &base->lock &obj_hash[i].lock irq_context: 0 &data->open_mutex cpu_hotplug_lock &rq->__lock irq_context: 0 &data->open_mutex cpu_hotplug_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &pcp->lock &zone->lock irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &____s->seqcount irq_context: 0 &data->open_mutex cpu_hotplug_lock wq_pool_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex &root->kernfs_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &fsnotify_mark_srcu pool_lock#2 irq_context: 0 &data->open_mutex &sem->wait_lock irq_context: 0 &data->open_mutex &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &data->open_mutex &root->kernfs_rwsem &sem->wait_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1261 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#191 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#191 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#188 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#186 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#560 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#560 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#554 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#552 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1262 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1262 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1262 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1263 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#561 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#561 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#561 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bond0#76 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#76 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)bond0#76 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex.wait_lock irq_context: 0 (wq_completion)bond0#76 (work_completion)(&(&slave->notify_work)->work) &p->pi_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#561 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#555 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#555 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#555 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#553 irq_context: 0 &data->open_mutex &sb->s_type->i_mutex_key#3 &____s->seqcount#2 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#553 &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#76 irq_context: 0 rtnl_mutex team->team_lock_key#76 &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#76 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#76 fs_reclaim irq_context: 0 rtnl_mutex team->team_lock_key#76 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 
rtnl_mutex team->team_lock_key#76 netpoll_srcu irq_context: 0 rtnl_mutex team->team_lock_key#76 net_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#76 net_rwsem &list->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#76 &tn->lock irq_context: 0 rtnl_mutex team->team_lock_key#76 _xmit_ETHER irq_context: 0 rtnl_mutex team->team_lock_key#76 &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#76 input_pool.lock irq_context: 0 rtnl_mutex team->team_lock_key#76 &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#76 rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#76 &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#76 nl_table_lock irq_context: 0 rtnl_mutex team->team_lock_key#76 nl_table_wait.lock irq_context: 0 rtnl_mutex team->team_lock_key#76 rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex team->team_lock_key#76 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#76 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex team->team_lock_key#76 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#76 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#76 &in_dev->mc_tomb_lock irq_context: 0 rtnl_mutex team->team_lock_key#76 &im->lock irq_context: 0 rtnl_mutex team->team_lock_key#76 cbs_list_lock irq_context: 0 rtnl_mutex team->team_lock_key#76 &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#76 sysfs_symlink_target_lock irq_context: 0 rtnl_mutex team->team_lock_key#76 lock irq_context: 0 rtnl_mutex team->team_lock_key#76 lock kernfs_idr_lock irq_context: 0 rtnl_mutex team->team_lock_key#76 &root->kernfs_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#76 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#76 &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#76 &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#76 &obj_hash[i].lock pool_lock irq_context: 0 rtnl_mutex team->team_lock_key#76 rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex team->team_lock_key#76 rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#76 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#76 lweventlist_lock irq_context: 0 rtnl_mutex team->team_lock_key#76 lweventlist_lock &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#76 (console_sem).lock irq_context: 0 rtnl_mutex team->team_lock_key#76 console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex team->team_lock_key#76 console_lock console_srcu console_owner irq_context: 0 rtnl_mutex team->team_lock_key#76 console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex team->team_lock_key#76 console_lock console_srcu console_owner console_owner_lock irq_context: 0 rtnl_mutex team->team_lock_key#76 &____s->seqcount#2 irq_context: 0 rtnl_mutex team->team_lock_key#76 &____s->seqcount irq_context: 0 rtnl_mutex team->team_lock_key#76 &pcp->lock &zone->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#553 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock 
&ul->lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#562 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#556 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1263 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1263 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1263 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1263 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1263 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1264 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#554 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#554 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1264 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#554 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1264 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#563 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#563 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#563 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#557 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#557 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#557 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#555 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#555 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#555 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1264 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1264 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1264 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1264 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1264 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1265 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#564 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#564 irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#558 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#556 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#556 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1265 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1265 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1265 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1265 irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 stock_lock irq_context: 0 sb_writers#5 &sb->s_type->i_mutex_key#13 pcpu_lock stock_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) 
&obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1057 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1057 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#356 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#356 irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#351 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#349 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#99 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#99 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#96 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1057 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1058 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#588 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#599 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#592 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#589 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &n->list_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#589 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#647 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#647 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#647 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#640 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1456 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#266 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#700 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#270 irq_context: 0 (wq_completion)hci0#9 irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#7 irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 sb_internal remove_cache_srcu &n->list_lock irq_context: 0 sb_internal remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_internal remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 sb_internal remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 &pcp->lock rcu_read_lock &p->pi_lock irq_context: 0 &pcp->lock rcu_read_lock &p->pi_lock &rq->__lock irq_context: 0 &pcp->lock rcu_read_lock &p->pi_lock &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &data->open_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#10 irq_context: 0 misc_mtx (wq_completion)nfc2_nci_cmd_wq#1334 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#613 &rq->__lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#155 irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) 
rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#640 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#640 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#649 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1399 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#95 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1399 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#636 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#231 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#229 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1399 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1398 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1398 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#661 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#661 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1398 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#654 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#589 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1314 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1314 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1398 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#654 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1398 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#654 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#242 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#242 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#242 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#646 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#646 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#646 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 jbd2_handle &____s->seqcount irq_context: 0 (wq_completion)nfc3_nci_rx_wq#639 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#639 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1375 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1375 &rq->__lock irq_context: 0 sb_writers#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1375 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#635 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1375 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1374 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1374 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#242 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#239 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#239 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#239 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#237 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#237 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#237 &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1400 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1400 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1399 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1399 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1399 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#662 irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: hardirq &rcu_state.expedited_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#931 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#662 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#662 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#655 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#651 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#651 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#651 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1401 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 fs_reclaim irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex 
team->team_lock_key#76 nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 net_rwsem irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 &tn->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#76 &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1401 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#663 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#663 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#656 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#652 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)nfc3_nci_tx_wq#652 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#652 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1402 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1401 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1401 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1401 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1403 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1403 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1403 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1403 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1402 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1402 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#664 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#664 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#657 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#653 irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem stock_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem key irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem pcpu_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 (wq_completion)hci3#6 
(work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem percpu_counters_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 cb_lock genl_mutex rfkill_global_mutex &root->kernfs_rwsem pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)phy161 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#510 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#931 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#504 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#504 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc5_nci_cmd_wq#24 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#504 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#933 irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &idev->mc_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#17 irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock remove_cache_srcu irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#17 irq_context: 0 rtnl_mutex &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#502 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#162 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1179 irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock 
&rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#70 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#142 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 &xt[i].mutex &mm->mmap_lock key irq_context: 0 &xt[i].mutex &mm->mmap_lock pcpu_lock irq_context: 0 &xt[i].mutex &mm->mmap_lock percpu_counters_lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &____s->seqcount irq_context: 0 rtnl_mutex dev_addr_sem rcu_read_lock &rq->__lock cpu_asid_lock irq_context: hardirq &vkms_out->lock &dev->event_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex nf_hook_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: softirq rcu_read_lock hwsim_radio_lock &zone->lock 
irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#932 irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 rtnl_mutex &ndev->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem per_cpu_ptr(&cgroup_rstat_cpu_lock, cpu) irq_context: softirq rcu_read_lock rcu_read_lock &meta->lock irq_context: 0 nf_sockopt_mutex stock_lock irq_context: 0 nf_sockopt_mutex pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#932 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#932 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#352 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#100 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1179 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1179 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#498 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#653 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#653 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#243 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 devlinks.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &xa->xa_lock#19 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex 
kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &n->list_lock irq_context: softirq rcu_read_lock rcu_read_lock k-slock-AF_INET#2 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock 
&rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock &tb->tb6_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock &tb->tb6_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &(&fn_net->fib_chain)->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 stack_depot_init_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex bpf_devs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex bpf_devs_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex bpf_devs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex bpf_devs_lock rcu_read_lock rhashtable_bucket irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock 
&dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &xa->xa_lock#4 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &devlink_port->type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex net_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &tn->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &x->wait#9 irq_context: 0 
sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex gdp_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex gdp_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex gdp_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex gdp_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex gdp_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex bus_type_sem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex sysfs_symlink_target_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#76 rtnl_mutex dpm_list_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex subsys mutex#20 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &dir->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex dev_hotplug_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex dev_base_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex input_pool.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock 
&dev->mutex &devlink->lock_key#76 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &tbl->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex sysctl_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex nl_table_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex nl_table_wait.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex failover_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &idev->mc_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &pnettable->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex smc_ib_devices.mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &vn->sock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#76 rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex batched_entropy_u32.lock crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex lock kernfs_idr_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &idev->mc_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex &idev->mc_lock &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 rtnl_mutex remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#76 &rq->__lock irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#76 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#76 &devlink_port->type_lock irq_context: 0 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key#23 &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock &rcu_state.expedited_wq irq_context: 0 &hdev->req_lock &hdev->lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &hdev->req_lock &hdev->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &hdev->req_lock &hdev->lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key#23 &p->pi_lock &rq->__lock irq_context: 0 rcu_read_lock &dentry->d_lock &sb->s_type->i_lock_key#23 &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#243 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem rtnl_mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex batched_entropy_u8.lock irq_context: 0 pernet_ops_rwsem ipvs->est_mutex kfence_freelist_lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#243 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#240 irq_context: 0 lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1186 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1187 irq_context: 0 pernet_ops_rwsem nf_ct_proto_mutex defrag4_mutex nf_hook_mutex &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1188 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#162 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#159 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1189 
irq_context: 0 (wq_completion)nfc4_nci_tx_wq#157 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#505 irq_context: 0 misc_mtx (wq_completion)nfc5_nci_cmd_wq#28 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &hard_iface->bat_iv.ogm_buff_mutex &lock->wait_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &lock->wait_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &p->pi_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1193 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1193 irq_context: 0 misc_mtx nfc_devlist_mutex remove_cache_srcu rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#272 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#272 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#268 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#266 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1459 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1459 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1459 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1459 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1458 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1458 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#709 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#709 irq_context: 0 (wq_completion)gid-cache-wq &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#702 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#698 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1460 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1460 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1314 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1314 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1314 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1315 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1458 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1457 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#708 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#708 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#708 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#701 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#697 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1457 irq_context: 0 rtnl_mutex &bat_priv->softif_vlan_list_lock &____s->seqcount#2 irq_context: 0 rtnl_mutex &bat_priv->softif_vlan_list_lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#73 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#73 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#73 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#73 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#68 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#68 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#68 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#68 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#238 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1404 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1404 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1403 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1403 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1403 &rq->__lock 
irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1403 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) &list->lock#5 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &list->lock#7 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) &rq->__lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock irq_context: 0 
(wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1460 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1460 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1459 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1459 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1461 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1461 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1460 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1460 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1460 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1460 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#710 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#710 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#703 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#699 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1462 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 stock_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &dentry->d_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &dentry->d_lock &wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1462 &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh key#20 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &entry->crc_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &idev->mc_lock rcu_read_lock rcu_read_lock rcu_read_lock_bh &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1315 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1315 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &lock->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &p->pi_lock 
&rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#207 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#207 irq_context: 0 rtnl_mutex cpu_hotplug_lock wq_pool_mutex.wait_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock &p->pi_lock irq_context: 0 rtnl_mutex cpu_hotplug_lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex cpu_hotplug_lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &xt[i].mutex &mm->mmap_lock rcu_node_0 irq_context: 0 &xt[i].mutex &mm->mmap_lock &rcu_state.expedited_wq irq_context: 0 &xt[i].mutex &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &xt[i].mutex &mm->mmap_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#204 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1462 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1461 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1461 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#711 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &ei->i_data_sem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->tt.work)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#711 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#704 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#704 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#704 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#700 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#76 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#665 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#665 irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#658 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#700 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#700 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1463 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1463 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1462 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1462 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1464 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1464 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1464 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1464 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1463 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1463 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1463 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1463 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#712 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#712 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#705 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#701 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#701 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#701 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1465 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1465 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1465 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#713 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#713 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1465 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1464 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1464 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &dev->mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1464 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1464 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#706 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#935 irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#935 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#702 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1466 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1466 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1466 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1466 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1465 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1465 &rq->__lock irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#1465 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#714 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#714 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#707 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#703 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1465 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1465 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1465 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#273 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#273 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#269 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#269 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#269 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#267 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1467 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1466 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1466 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1466 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1466 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#715 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#715 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#715 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#715 irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock rcu_node_0 irq_context: 0 rtnl_mutex rcu_read_lock rcu_read_lock &rq->__lock irq_context: 0 rtnl_mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#708 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#704 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#937 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#102 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#938 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#704 &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#511 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#511 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#511 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#511 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#505 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#704 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1468 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1468 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1468 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1468 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1467 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1467 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1467 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &dev->clientlist_mutex &helper->lock 
&dev->master_mutex &client->modeset_mutex crtc_ww_class_acquire crtc_ww_class_mutex fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1467 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1467 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1467 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#709 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#709 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#709 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#705 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#274 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#274 irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)gid-cache-wq (work_completion)(&ndev_work->work) quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#940 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#940 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#941 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#941 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#943 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#943 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#944 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#944 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#945 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu irq_context: 0 (wq_completion)nfc4_nci_rx_wq#270 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#270 &rq->__lock irq_context: 0 rtnl_mutex sysctl_lock krc.lock &base->lock irq_context: 0 rtnl_mutex sysctl_lock krc.lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#268 irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg2#139 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#140 
(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#140 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1469 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1469 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1468 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1468 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1468 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1468 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1468 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1470 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1470 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#505 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#505 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#19 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_tx_wq#19 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#19 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#19 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#503 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 cgroup_threadgroup_rwsem rcu_node_0 irq_context: 0 cgroup_threadgroup_rwsem &rcu_state.expedited_wq irq_context: 0 cgroup_threadgroup_rwsem &rcu_state.expedited_wq 
&p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx rcu_state.exp_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#7 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &c->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &n->list_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock 
rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#369 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &c->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock &rq->__lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->tx_work) &list->lock#5 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)hci5#4 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1266 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#192 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#187 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#187 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#304 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#304 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1545 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1545 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#304 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1545 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex subsys mutex#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#43 irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 
(wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-kex-wg0#155 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &rq->__lock irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex &wg->device_update_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dir->lock#2 &meta->lock irq_context: 0 (wq_completion)netns 
net_cleanup_work pernet_ops_rwsem &dir->lock#2 kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 rcu_read_lock &vma->vm_lock->lock percpu_counters_lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)events (work_completion)(&w->work)#2 nf_conntrack_mutex &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem uevent_sock_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#156 irq_context: 0 
(wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = 
(typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void 
*__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#948 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#950 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#949 irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
&peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &base->lock irq_context: 0 pernet_ops_rwsem remove_cache_srcu &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#154 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock &n->list_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &im->lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 pernet_ops_rwsem batched_entropy_u8.lock irq_context: 0 pernet_ops_rwsem kfence_freelist_lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = 
(unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#949 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#77 irq_context: 0 
(wq_completion)wg-crypt-wg1#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_ifc_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &____s->seqcount#2 irq_context: 0 &sighand->siglock &n->list_lock irq_context: 0 &sighand->siglock &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock uevent_sock_mutex &____s->seqcount#2 irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#163 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#163 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#163 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#163 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#160 irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#4 batched_entropy_u8.lock irq_context: 0 sb_writers#4 kfence_freelist_lock irq_context: 0 sb_writers#4 &meta->lock irq_context: 0 (wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &tbl->lock irq_context: 0 
(wq_completion)mld (work_completion)(&(&idev->mc_dad_work)->work) &idev->mc_lock rcu_read_lock rcu_read_lock &n->lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#160 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#160 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#158 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#158 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#29 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &dev->mutex uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#27 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#43 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#514 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#506 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#165 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#515 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#516 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem nf_conntrack_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)bond0#79 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#79 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#79 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#516 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#516 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#516 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#510 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc3_nci_rx_wq#510 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#517 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg2#157 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg2#156 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg2#77 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#334 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#334 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg2#152 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#334 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg2#153 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#329 irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#328 irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#94 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#94 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#91 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#91 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#91 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) fill_pool_map-wait-type-override &____s->seqcount#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->orig_work)->work) fill_pool_map-wait-type-override pool_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#9 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sb->s_type->i_mutex_key#10 (netlink_chain).rwsem rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#881 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#881 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#881 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#883 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#335 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#335 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#335 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#335 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#330 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#329 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#882 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#882 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#882 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#884 irq_context: 0 (wq_completion)netns net_cleanup_work 
pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg0#155 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg0#156 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#883 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#883 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#885 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &base->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#336 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#336 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#336 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#331 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#331 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#330 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#886 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 rcu_read_lock &pool->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#518 irq_context: 0 (wq_completion)wg-crypt-wg0#71 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 &n->list_lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock irq_context: 0 nfc_devlist_mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1202 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1202 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1202 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1203 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg0#79 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1203 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1203 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1203 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1204 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#522 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#522 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#516 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#514 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#514 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#514 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg0#157 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1204 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1204 irq_context: 0 sb_writers#5 &base->lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx batched_entropy_u8.lock irq_context: 0 (wq_completion)events (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx kfence_freelist_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &simple_offset_xa_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &xattrs->lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 smack_known_lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 smack_known_lock &obj_hash[i].lock irq_context: 0 sb_writers &type->i_mutex_dir_key#2 &sb->s_type->i_lock_key#5 &dentry->d_lock irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 pcpu_lock stock_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1205 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1206 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1206 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1206 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1207 irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &mm->mmap_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#153 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#78 irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fib_info_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)hci0#10 
(work_completion)(&hdev->cmd_work) &c->lock irq_context: hardirq &vkms_out->lock &dev->event_lock &x->wait#15 &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &n->list_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start rcu_node_0 irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &vma->vm_lock->lock fs_reclaim mmu_notifier_invalidate_range_start &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg2#152 irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg2#152 
(work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 rtnl_mutex remove_cache_srcu stock_lock irq_context: 0 rtnl_mutex remove_cache_srcu key irq_context: 0 rtnl_mutex remove_cache_srcu pcpu_lock irq_context: 0 rtnl_mutex remove_cache_srcu percpu_counters_lock irq_context: 0 rtnl_mutex remove_cache_srcu pcpu_lock stock_lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 cb_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 
0 (wq_completion)wg-kex-wg0#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#153 irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
&wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) 
&handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) rcu_node_0 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do 
{ const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex 
defrag4_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#154 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) 
*)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#152 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&({ do { const void *__vpp_verify = 
(typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1058 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1058 irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) 
&peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#77 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#153 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock 
irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg0#78 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#212 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#212 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#25 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1336 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#29 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#29 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#29 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#27 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 rcu_node_0 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#512 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#512 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#506 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#506 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#506 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#504 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#513 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#513 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#164 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#164 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#164 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#164 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#161 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#507 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#159 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#505 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1204 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1204 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1204 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1204 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1204 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1205 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#523 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#523 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#523 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#523 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#517 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg0#158 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#515 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#514 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#514 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#514 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#508 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#165 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#162 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#162 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#162 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#160 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#515 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#515 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#515 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#509 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#507 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#166 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#202 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#166 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#166 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#166 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#163 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#163 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#163 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#161 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#30 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#30 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#30 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#30 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#28 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#28 
irq_context: 0 (wq_completion)nfc5_nci_tx_wq#28 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#28 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#510 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#508 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#508 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#508 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex triggers_list_lock &rq->__lock irq_context: 0 cb_lock genl_mutex triggers_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 sb_writers#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#167 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &sb->s_type->i_mutex_key#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rq->__lock cpu_asid_lock irq_context: 0 cb_lock genl_mutex &rcu_state.expedited_wq irq_context: 0 cb_lock genl_mutex &rcu_state.expedited_wq &p->pi_lock irq_context: 0 cb_lock genl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 cb_lock genl_mutex &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &n->list_lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &obj_hash[i].lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 cb_lock genl_mutex rtnl_mutex &rdev->wiphy.mtx fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 cb_lock rtnl_mutex &rdev->wiphy.mtx fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex &ndev->lock &ifa->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex 
&rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx &sb->s_type->i_mutex_key#3 &dentry->d_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#167 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#164 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tn->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 krc.lock &base->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &macvlan_netdev_addr_lock_key/1 krc.lock &base->lock &obj_hash[i].lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock &n->list_lock irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock &in_dev->mc_tomb_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while 
(0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc4_nci_tx_wq#162 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#517 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#511 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#509 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->dat.work)->work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock kfence_freelist_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &tbl->lock &meta->lock irq_context: 0 sb_writers#5 &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1205 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)bond0#80 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1205 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#525 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#519 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1206 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#524 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#524 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#518 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#516 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1207 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1207 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1207 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1207 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1207 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1207 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1207 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1208 
irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1208 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1208 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#337 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1208 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#337 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1208 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#337 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1208 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#337 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1209 irq_context: 0 tty_mutex &tty->legacy_mutex rcu_read_lock &rq->__lock irq_context: 0 tty_mutex &tty->legacy_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#332 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#331 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock crngs.lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &tbl->lock irq_context: 0 sk_lock-AF_INET remove_cache_srcu rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &n->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#169 irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#169 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#169 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#169 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#166 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#164 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#517 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#517 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#517 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1210 irq_context: 0 
(wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1211 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#32 irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1213 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#32 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#170 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#529 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1216 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 &vma->vm_lock->lock rcu_read_lock rcu_read_lock rcu_read_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#530 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#530 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1217 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#531 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#172 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#169 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#167 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#532 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1219 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#30 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#30 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1209 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1209 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1219 irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rcu_state.expedited_wq irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &f->f_pos_lock &type->i_mutex_dir_key#3 &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#173 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1209 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1209 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#518 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#512 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#510 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1209 irq_context: 0 (wq_completion)events_power_efficient (gc_work).work fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1209 &rq->__lock irq_context: 0 (wq_completion)events_power_efficient (gc_work).work fill_pool_map-wait-type-override &c->lock irq_context: 0 &tty->legacy_mutex &n->list_lock irq_context: 0 &tty->legacy_mutex &n->list_lock &c->lock 
irq_context: 0 (wq_completion)events_power_efficient (gc_work).work fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock &pcp->lock &zone->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#174 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1221 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1209 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1210 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1210 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1210 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#526 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#526 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#520 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#518 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1210 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1210 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1210 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1210 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1210 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1210 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1211 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#527 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#527 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#521 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#519 irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1211 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1211 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1211 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1211 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bond0#79 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#79 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)bond0#79 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &rq->__lock irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)krxrpcd (work_completion)(&rxnet->peer_keepalive_work) fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#172 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#172 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1222 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#34 irq_context: 0 
(wq_completion)nfc5_nci_cmd_wq#34 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1224 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#538 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1212 irq_context: 0 (wq_completion)wg-kex-wg0#163 irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 
(wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 kn->active#48 &____s->seqcount#2 irq_context: 0 kn->active#48 &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#161 irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#216 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1339 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 rtnl_mutex devnet_rename_sem uevent_sock_mutex.wait_lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#9 &of->mutex cgroup_mutex cpu_hotplug_lock cgroup_threadgroup_rwsem fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#7 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock 
irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 tty_mutex &tty->legacy_mutex &tty->ldisc_sem &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci0#9 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 &dentry->d_lock fill_pool_map-wait-type-override &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &c->lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events deferred_process_work &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &____s->seqcount#2 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &____s->seqcount irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 
(wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &c->lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &n->list_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->tx_work) &list->lock#5 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci0#10 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 (wq_completion)hci0#10 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)hci0#10 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1470 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1470 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1469 irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1469 &rq->__lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq irq_context: 0 pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 pernet_ops_rwsem rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1469 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1469 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1471 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1471 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1471 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1470 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1470 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1470 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1470 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#717 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#717 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#717 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#717 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#710 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#710 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#706 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1472 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1472 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1472 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1472 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1471 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1471 &rq->__lock irq_context: 0 &p->lock remove_cache_srcu rcu_node_0 irq_context: 0 &p->lock remove_cache_srcu 
&rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 tty_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)wg-kex-wg1#129 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem sysctl_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#4 oom_adj_mutex rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#539 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#539 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#539 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#539 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#533 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#531 irq_context: 0 sb_writers#3 sb_internal jbd2_handle &obj_hash[i].lock pool_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &p->lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &p->lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &p->lock remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1471 
irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1471 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1471 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#707 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#707 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#719 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#275 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#69 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&conn->disc_work)->work) irq_context: 0 &sig->cred_guard_mutex sb_writers#3 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_tx_wq#16 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &dir->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex dev_hotplug_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1546 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#770 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#771 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex dev_base_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex input_pool.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &tbl->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex sysctl_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex failover_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock 
&dev->mutex &devlink->lock_key#82 rtnl_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &idev->mc_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &pnettable->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex smc_ib_devices.mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &vn->sock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex uevent_sock_mutex &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1070 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1070 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex uevent_sock_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &idev->mc_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 
rtnl_mutex &idev->mc_lock &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &idev->mc_lock _xmit_ETHER &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex lock kernfs_idr_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex lock kernfs_idr_lock &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &idev->mc_lock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1070 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1070 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1070 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1071 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1071 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1071 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#82 irq_context: 0 cb_lock nlk_cb_mutex-GENERIC &devlink->lock_key#82 &devlink_port->type_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1071 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1071 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1071 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1071 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1072 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1072 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1072 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#437 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#437 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#437 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#437 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#431 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#429 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#429 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1072 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1072 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1072 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1072 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rcu_state.exp_mutex rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1072 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1073 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1074 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1074 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1074 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1074 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1074 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1074 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#438 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#438 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#438 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#438 irq_context: softirq (&ndev->rs_timer) &ndev->lock fill_pool_map-wait-type-override pool_lock irq_context: softirq (&ndev->rs_timer) &ndev->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#432 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1547 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1547 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1547 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1545 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#307 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#432 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#432 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#432 &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#135 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#135 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#132 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#130 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1075 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1075 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1075 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1075 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1076 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1076 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1077 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1226 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#540 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#90 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#84 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#84 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#84 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#766 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1550 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1228 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1550 irq_context: 0 misc_mtx (wq_completion)nfc5_nci_cmd_wq#91 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1551 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#777 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#777 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1555 irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1555 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1555 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1555 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#779 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#779 &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&(&devlink->rwork)->work) &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#779 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1556 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#310 irq_context: 0 
&ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1557 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1557 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1555 &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1558 irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&p->wq) rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1559 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1559 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#780 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#780 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1560 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1559 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1559 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1562 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1562 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1563 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#771 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#771 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1565 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#782 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#782 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#769 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1567 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1077 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#439 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#439 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#433 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#431 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#784 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1568 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1228 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1228 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1229 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#177 irq_context: 0 sb_writers#5 stock_lock irq_context: 0 sb_writers#5 key irq_context: 0 sb_writers#5 pcpu_lock irq_context: 0 sb_writers#5 percpu_counters_lock irq_context: 0 sb_writers#5 pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1566 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#785 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1570 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1570 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1568 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1568 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1568 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1568 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1568 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1571 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1572 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1572 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1570 
irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1570 &rq->__lock irq_context: softirq rcu_read_lock rcu_read_lock slock-AF_INET/1 fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1573 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1573 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1573 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1573 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1571 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1574 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1574 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1572 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1077 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1572 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1575 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1575 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1575 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1576 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1576 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1230 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1230 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1230 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#178 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#36 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#36 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1233 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1235 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1235 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#37 irq_context: 0 misc_mtx (wq_completion)nfc5_nci_cmd_wq#39 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#718 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#718 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#711 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#711 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1473 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1473 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1473 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1473 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1473 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1473 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1472 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#658 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1576 &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock stock_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock key irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock pcpu_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock percpu_counters_lock irq_context: 0 misc_mtx nfc_devlist_mutex uevent_sock_mutex rcu_read_lock pcpu_lock stock_lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#1576 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1574 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1577 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1577 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1575 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1575 irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1235 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1235 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#537 irq_context: 0 &dev->mutex &root->kernfs_rwsem &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1238 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1238 irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1578 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1578 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1576 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1239 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1239 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1579 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1579 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1579 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1579 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1580 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1580 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1580 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1578 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1581 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1581 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1579 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1240 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#547 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#547 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#180 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#180 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#548 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#549 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1244 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1244 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1246 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1247 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1247 &rq->__lock irq_context: 0 &tty->legacy_mutex devpts_mutex &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1247 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#658 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#654 irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#654 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#654 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#241 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#241 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1579 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1582 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1582 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1582 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1582 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1580 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1580 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1580 irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1580 &rq->__lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem rcu_node_0 irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1583 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1583 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1583 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock rcu_node_0 irq_context: 0 &ndev->req_lock &rcu_state.expedited_wq irq_context: 0 &ndev->req_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &cfs_rq->removed.lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle rcu_read_lock percpu_counters_lock irq_context: 0 &ndev->req_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx lweventlist_lock &n->list_lock &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock &obj_hash[i].lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock key irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock pcpu_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle &sbi->s_orphan_lock pool_lock#2 
irq_context: 0 &ndev->req_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1583 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1581 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1077 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1077 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1584 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1584 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1582 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1582 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1585 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1585 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1585 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1583 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &journal->j_state_lock &journal->j_wait_transaction_locked irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1586 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1586 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1586 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1587 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1587 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1587 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1587 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1588 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1588 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1586 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1589 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1589 irq_context: 0 &dev->mutex uevent_sock_mutex rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1590 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1590 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1590 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#7 irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#780 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#780 &rq->__lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &base->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &rq->__lock irq_context: 0 pernet_ops_rwsem rtnl_mutex dev_hotplug_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1595 irq_context: 0 &sb->s_type->i_mutex_key#10 sk_lock-AF_INET6 rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1592 &rq->__lock irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#780 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#776 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1612 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1612 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1594 irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#3 fill_pool_map-wait-type-override &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1612 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1612 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1610 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1610 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1610 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1610 irq_context: softirq (&in_dev->mr_ifc_timer) rcu_read_lock rcu_read_lock_bh quarantine_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_rcv#10 irq_context: 0 (wq_completion)bond0#84 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1595 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1596 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1596 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1596 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1596 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1599 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1613 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1613 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1613 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1613 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1611 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1611 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 
(wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 kn->active#4 &kernfs_locks->open_file_mutex[count] rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1611 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1611 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1611 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#84 &nsim_trap_data->trap_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#84 &nsim_trap_data->trap_lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1614 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1614 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1614 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1614 stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1614 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1614 key irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1614 pcpu_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1614 percpu_counters_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1614 pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1614 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1612 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1612 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1615 irq_context: 0 &sig->cred_guard_mutex &mm->mmap_lock rcu_read_lock ptlock_ptr(ptdesc)#2 lock#4 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1615 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &base->lock irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5 tomoyo_ss &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1613 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1613 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1616 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1614 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1614 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1614 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1614 irq_context: 
0 (wq_completion)nfc2_nci_tx_wq#1614 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1614 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1617 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1617 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1617 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1617 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1615 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1615 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &idev->mc_query_lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &vma->vm_lock->lock rcu_node_0 irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &cfs_rq->removed.lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &obj_hash[i].lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem pool_lock#2 irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem rcu_node_0 irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rcu_state.expedited_wq irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#241 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#553 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#239 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#547 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#42 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#554 irq_context: 0 (wq_completion)hci5#3 irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock &list->lock#7 irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 
(wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) &list->lock#5 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &list->lock#7 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem 
&root->kernfs_iattr_rwsem &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci5#3 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &n->list_lock irq_context: 0 (wq_completion)hci5#4 (work_completion)(&hdev->rx_work) &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1618 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1618 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#777 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#792 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1620 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1620 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#793 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#314 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1622 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1622 rcu_node_0 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex remove_cache_srcu &cfs_rq->removed.lock irq_context: 0 rtnl_mutex &br->lock &br->hash_lock rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#12 &rq->__lock irq_context: 0 rtnl_mutex dpm_list_mtx &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#10 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bond0#81 irq_context: 0 (wq_completion)bond0#81 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#81 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#81 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#81 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 misc_mtx hrtimer_bases.lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx hrtimer_bases.lock fill_pool_map-wait-type-override &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#33 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#33 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1091 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1091 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1091 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1091 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1092 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#447 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#447 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#447 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#447 stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#447 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#447 key irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#447 pcpu_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#447 pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#447 percpu_counters_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#447 pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#447 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#441 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#439 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1092 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-crypt-wg1#79 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg1#157 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg1#158 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1092 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1092 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1092 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1093 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg0#159 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &wg->device_update_lock (wq_completion)wg-kex-wg0#160 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1093 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1093 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1093 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1093 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1094 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#448 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#448 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#442 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#440 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#26 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#25 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#25 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#440 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#440 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1094 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1094 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock quarantine_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)bond0#81 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1095 &rq->__lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#1095 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#18 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#15 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_tx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1476 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#76 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#76 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#20 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#20 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (work_completion)(&local->sdreq_timeout_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#77 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 nfc_devlist_mutex &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &rq->__lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#24 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#24 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#23 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#23 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#15 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim &cfs_rq->removed.lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex fs_reclaim pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc31_nci_rx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc31_nci_tx_wq#10 irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#20 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#20 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#35 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1479 irq_context: 0 (wq_completion)nfc34_nci_tx_wq#3 irq_context: 0 
(wq_completion)bond0#81 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 (wq_completion)bond0#81 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#81 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)nfc34_nci_tx_wq#3 &rq->__lock irq_context: 0 (wq_completion)nfc34_nci_tx_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#3 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#3 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#3 &rq->__lock irq_context: 0 (wq_completion)nfc33_nci_tx_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#723 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#723 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1480 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1480 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1479 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1481 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1095 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1095 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1095 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1096 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#449 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#140 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#140 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#443 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#137 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#441 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#135 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 lweventlist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 rcu_read_lock &pool->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1482 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1482 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#725 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#879 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#879 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#879 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#879 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#879 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#881 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#881 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#881 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#159 
(work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#157 irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#157 
(work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#329 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#329 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1482 irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#324 irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#323 irq_context: 0 (wq_completion)wg-kex-wg2#156 irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 
0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#880 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#880 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#880 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#880 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#882 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#882 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#882 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1078 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1078 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1078 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1078 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1079 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#331 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#331 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#331 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#326 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#326 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#326 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#332 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#332 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#332 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#332 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#327 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#326 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock irq_context: 0 misc_mtx remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem ovs_mutex nf_ct_proto_mutex defrag6_mutex &rq->__lock irq_context: 0 misc_mtx remove_cache_srcu &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1485 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1486 irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#157 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } 
while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#160 irq_context: 0 (wq_completion)wg-kex-wg2#157 irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); 
})->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1487 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1487 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1487 irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->k_lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#157 
(work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount 
irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#157 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg0#80 irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&bat_priv->mcast.work)->work) fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#158 irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); 
(void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); 
(void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ 
unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh 
rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#79 irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh 
tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long 
__ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#79 
irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock 
irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#885 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#885 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#885 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#885 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#885 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#887 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#888 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#887 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#889 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#888 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#888 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#888 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#888 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#338 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#338 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#333 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#332 irq_context: 0 (work_completion)(&local->timeout_work) &rq->__lock irq_context: 0 (work_completion)(&local->timeout_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#890 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#95 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#92 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#92 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#92 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#91 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#891 irq_context: softirq (&p->timer) &br->multicast_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: softirq (&mp->timer) &br->multicast_lock &n->list_lock irq_context: softirq (&mp->timer) &br->multicast_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)ipv6_addrconf (work_completion)(&(&ifa->dad_work)->work) rtnl_mutex rcu_read_lock rcu_read_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock kfence_freelist_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#890 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#890 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#892 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#334 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#334 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#333 irq_context: 0 &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem sched_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#891 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#891 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#891 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#893 irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg2#128 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#892 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#892 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#892 irq_context: 0 misc_mtx (wq_completion)nfc4_nci_tx_wq#92 irq_context: 0 misc_mtx (wq_completion)nfc4_nci_tx_wq#92 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1489 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1489 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1489 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#729 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1491 irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &sig->cred_guard_mutex 
&sig->exec_update_lock &mm->mmap_lock rcu_read_lock &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc4_nci_tx_wq#92 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc4_nci_tx_wq#92 &cfs_rq->removed.lock irq_context: 0 misc_mtx (wq_completion)nfc4_nci_tx_wq#92 &obj_hash[i].lock irq_context: 0 misc_mtx (wq_completion)nfc4_nci_rx_wq#93 irq_context: 0 misc_mtx (wq_completion)nfc4_nci_cmd_wq#96 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#894 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#340 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#340 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#335 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1491 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1491 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#334 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#893 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1491 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#893 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#279 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#277 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#895 irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#894 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#894 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#896 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1494 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1494 &rq->__lock irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#720 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#285 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#285 irq_context: 0 &ep->mtx &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1497 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1499 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1499 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1499 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1499 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1500 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1500 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1501 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1501 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1501 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1501 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1502 irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#287 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#287 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#287 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#287 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#283 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1507 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1507 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#738 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#730 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#739 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1508 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#731 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1509 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1510 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1510 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1510 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#895 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#341 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#341 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#336 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#895 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1510 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1509 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1509 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#741 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#741 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#741 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#741 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#742 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1511 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#285 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#283 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#290 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#290 &rq->__lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc4_nci_cmd_wq#290 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#744 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1514 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1514 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#287 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1518 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1516 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1516 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1515 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1517 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#78 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#293 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#737 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#79 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1520 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1520 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#751 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#752 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1523 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#753 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#753 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#745 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#745 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#745 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#36 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#21 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#17 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx &rq->__lock &obj_hash[i].lock irq_context: 0 misc_mtx &rq->__lock &base->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_rx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc42_nci_tx_wq#3 irq_context: 0 misc_mtx nfc_devlist_mutex rcu_read_lock &rq->__lock irq_context: 0 misc_mtx nfc_devlist_mutex rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc49_nci_cmd_wq#2 &rq->__lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci0#10 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci0#10 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 
(wq_completion)hci0#10 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#10 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1524 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#297 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#297 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#755 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#895 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#335 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#897 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#756 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#22 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#896 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#896 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#22 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#42 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc36_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1529 irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc46_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#896 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#898 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#342 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1079 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1079 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#440 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#440 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#440 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#440 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#434 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#434 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#759 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#434 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#136 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#432 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#136 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#133 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1079 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1079 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1532 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1532 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1532 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1532 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#131 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1080 irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex 
stock_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex key irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex pcpu_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex percpu_counters_lock irq_context: 0 misc_mtx cpu_hotplug_lock wq_pool_mutex pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#441 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#441 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#435 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#433 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1080 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1080 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1096 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1096 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1096 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1096 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 &mapping->i_mmap_rwsem &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1097 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#141 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#141 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#138 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#136 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#450 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#83 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#83 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#83 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1534 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1536 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1536 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1532 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#753 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1540 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1540 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#765 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1538 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#767 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#767 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1541 irq_context: 0 &ep->mtx &mm->mmap_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#342 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#342 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#342 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#337 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#336 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#897 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->tvlv.container_list_lock &meta->lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#897 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#897 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#899 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#343 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#338 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#337 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#898 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#898 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#898 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#900 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem fs_reclaim &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#900 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#900 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#900 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#900 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#902 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#344 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#344 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#339 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#338 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#901 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#901 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#904 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#345 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#345 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#340 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#339 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#346 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#903 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#903 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#903 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#903 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#905 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#932 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#932 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#934 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#357 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#357 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#352 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#352 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#350 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#350 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#350 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#100 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#97 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#96 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#933 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#933 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#935 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#935 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#935 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#358 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#358 
irq_context: 0 (wq_completion)nfc3_nci_rx_wq#353 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#353 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#353 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#351 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#101 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#101 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#98 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#97 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#934 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#934 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#936 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#936 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#935 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#935 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#937 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#936 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#936 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#936 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#936 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#938 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#359 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#359 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#354 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#352 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#937 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#937 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#937 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#937 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#937 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#939 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#360 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#360 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#360 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#360 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#355 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#353 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#353 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#353 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#102 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#102 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#102 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#99 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#98 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#938 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#938 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#938 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#938 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#940 irq_context: 0 (wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 
(wq_completion)wg-kex-wg2#156 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 
(wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#158 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#79 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg2#79 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#361 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#361 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#361 &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#361 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#356 irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#161 
(work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#354 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#354 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#939 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#939 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#939 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#939 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#939 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#941 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#103 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#103 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#103 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#103 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#100 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#99 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#362 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#362 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#357 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#355 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#355 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#355 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#940 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#940 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#942 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#363 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#363 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#363 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#363 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#358 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#356 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex uevent_sock_mutex.wait_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#104 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#104 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#101 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#100 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#941 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#364 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#364 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#359 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#357 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#365 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#365 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#942 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#942 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#942 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#944 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#360 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#358 irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#943 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#366 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#366 &rq->__lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#366 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#943 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#945 irq_context: 0 sk_lock-AF_INET rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#366 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#361 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#359 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#944 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#946 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#946 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#945 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#945 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#947 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#367 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#367 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#362 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#360 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu quarantine_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#946 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#946 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#946 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#946 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#946 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#948 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#368 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#368 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#363 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#361 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#947 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#947 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#947 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#947 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#949 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#369 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#364 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#362 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#362 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#362 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#948 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#948 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#105 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#105 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#102 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#102 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#102 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#101 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#949 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#949 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#949 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#951 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#951 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#951 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#951 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#370 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#370 &rq->__lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#370 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#370 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#365 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#363 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#950 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#952 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#366 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#366 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#366 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#364 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#364 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#364 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#951 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#953 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#372 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#372 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#367 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#365 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#952 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#952 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#952 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#952 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#954 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#373 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#373 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#368 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#366 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#106 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#106 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#106 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#103 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#102 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc7_nci_tx_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc14_nci_rx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#4 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim &obj_hash[i].lock irq_context: 0 
sb_writers#4 &sb->s_type->i_mutex_key#9 fs_reclaim pool_lock#2 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#4 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#4 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#4 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc25_nci_tx_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_rx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#5 irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)bat_events (work_completion)(&(&forw_packet_aggr->delayed_work)->work) &hard_iface->bat_iv.ogm_buff_mutex &bat_priv->forw_bat_list_lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc36_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc37_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc37_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc37_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc37_nci_tx_wq#3 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#953 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#953 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#955 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#374 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#374 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#369 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#367 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#367 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#367 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#107 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#107 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#107 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#107 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#104 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#103 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock 
(wq_completion)nfc8_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_rx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#375 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#375 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#370 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#368 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_rx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#5 irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex rcu_read_lock pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc17_nci_rx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#4 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#6 irq_context: 0 &dev->mutex dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc23_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#4 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc24_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#4 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc24_nci_tx_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc40_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc40_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc40_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc40_nci_tx_wq#2 irq_context: 0 (wq_completion)nfc40_nci_tx_wq#2 &rq->__lock irq_context: 0 
(wq_completion)nfc40_nci_tx_wq#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#954 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#954 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#954 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#956 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#108 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#108 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#105 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#104 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#371 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#371 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#371 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#369 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc9_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc12_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc39_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc39_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc39_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc38_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc38_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc38_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc38_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc37_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc37_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc37_nci_rx_wq#4 irq_context: 
0 (wq_completion)nfc37_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc36_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc36_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc36_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc35_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc35_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc35_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc33_nci_rx_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc33_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#377 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#377 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#372 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#370 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#109 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#109 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#106 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#105 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#955 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#955 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#955 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#957 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#8 irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#373 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#373 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#371 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#110 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#957 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#957 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#959 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#958 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#958 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#958 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#958 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#960 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#959 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#959 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#961 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#111 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#960 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#960 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#960 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#108 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#107 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#374 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#962 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#372 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#961 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#961 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#963 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#962 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#964 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#964 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#964 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#963 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#965 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#965 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#964 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#964 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#964 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#964 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#964 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#966 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#966 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#380 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#380 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#375 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#373 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#965 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#965 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#965 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#965 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#965 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#965 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#967 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#966 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#966 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#966 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#966 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#968 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#381 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#381 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#381 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#381 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#376 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#374 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#383 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#967 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#967 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#969 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#377 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#375 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#112 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#112 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#109 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#108 irq_context: 0 crtc_ww_class_mutex &cfs_rq->removed.lock irq_context: 0 crtc_ww_class_mutex &obj_hash[i].lock irq_context: 0 crtc_ww_class_mutex pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#968 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#970 irq_context: 0 sb_writers#4 &sb->s_type->i_mutex_key#9 remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#113 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#113 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#113 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#113 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#110 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#109 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#384 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#384 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#384 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#384 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#384 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#384 pool_lock#2 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#384 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#378 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#378 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#378 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#376 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#969 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#969 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#971 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#972 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#114 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#114 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#111 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#110 irq_context: 0 &type->i_mutex_dir_key#4 remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#385 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#385 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#385 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#385 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#117 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#978 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#117 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#114 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#113 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#113 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#113 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1001 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1001 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1002 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#406 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#406 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#406 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#406 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#400 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#398 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1002 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1002 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1003 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1004 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#407 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#401 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#399 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#126 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#126 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1004 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1004 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1004 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1004 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1004 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1005 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1005 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1005 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1006 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1006 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1006 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1006 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1007 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#408 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#408 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#402 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#402 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#402 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#400 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#127 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#127 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#124 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1008 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1008 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1008 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1009 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#403 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#401 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1009 irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#1009 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1009 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1010 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1010 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1010 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1010 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1010 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1010 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1010 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1011 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1011 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1011 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1011 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1011 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1011 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1011 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1011 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1011 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1012 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1012 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1012 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1013 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1013 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1014 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1014 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1015 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1015 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1015 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1016 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#410 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#410 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#410 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#410 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#404 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#404 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#404 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#128 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#128 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#125 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#402 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#124 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#124 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#124 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1016 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1016 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events_unbound (work_completion)(&sub_info->work) remove_cache_srcu &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1017 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1017 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1017 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1017 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1017 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1018 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1018 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#411 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#411 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#411 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#411 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#405 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1018 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1018 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1018 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1019 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1019 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1019 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1019 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1023 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1024 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1024 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1024 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1024 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1024 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1024 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1024 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1024 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1025 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu percpu_counters_lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal remove_cache_srcu pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1025 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1025 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1025 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1025 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1025 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1025 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1026 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1026 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1026 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1026 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1027 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1027 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1027 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1027 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1027 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1028 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1028 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1029 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1029 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1029 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#412 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#412 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#412 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#404 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci4#6 
(work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#7 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci4#6 
(work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &rq->__lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci4#5 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &c->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->tx_work) &list->lock#5 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)hci4#6 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci4#6 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1058 
irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1059 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1059 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1060 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1060 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1060 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1060 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1060 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1060 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1061 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#427 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#421 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#419 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#450 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#444 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#442 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1097 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1097 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1097 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1081 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1097 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1097 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 jbd2_handle rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1098 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1081 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1081 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1081 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1081 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1081 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1082 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#436 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#434 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#137 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#137 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#134 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#132 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1082 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1083 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1083 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1083 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1083 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1083 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1083 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1084 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1084 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1084 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1084 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1085 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#443 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#443 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#437 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#435 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#1085 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1085 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1085 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1086 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#142 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#142 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#139 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#137 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1098 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1099 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#451 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#451 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#445 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#443 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1099 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1099 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1099 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1100 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#452 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#452 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#452 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#438 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#436 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#138 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#138 irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#135 irq_context: 0 (wq_completion)wg-kex-wg0#161 
(work_completion)(&peer->transmit_handshake_work) rcu_node_0 irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &rcu_state.expedited_wq irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1087 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1088 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#439 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#437 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1088 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1088 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1088 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1088 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1089 irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg0#161 
(work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#159 irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &____s->seqcount#2 irq_context: 0 
(wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &tbl->lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &n->lock irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#8 irq_context: 0 (wq_completion)wg-kex-wg1#159 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg2#158 irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#158 
(work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#158 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg0#162 irq_context: 0 (wq_completion)wg-kex-wg0#162 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#162 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned 
long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1061 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1061 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1061 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1062 irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock 
&table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh 
&peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock 
rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#160 irq_context: 0 (wq_completion)wg-kex-wg1#160 
(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); 
(void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#159 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); 
(void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); 
(void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 
0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock 
irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#452 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#446 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#428 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#428 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#428 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#428 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#422 irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#160 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#80 irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1062 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1062 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1062 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1062 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1062 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#420 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1063 irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) &rq->__lock irq_context: 0 &mm->mmap_lock remove_cache_srcu &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#429 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#429 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#429 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#423 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#421 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#421 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#421 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1063 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1063 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1063 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1064 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#131 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#131 irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc4_nci_rx_wq#128 irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock 
rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc4_nci_tx_wq#126 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#430 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#430 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#424 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#422 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#444 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#444 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#444 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#159 
(work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount#2 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &____s->seqcount irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)bond0#82 irq_context: 0 (wq_completion)bond0#82 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#82 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#82 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#82 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 
(wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock remove_cache_srcu &pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#81 irq_context: 0 rtnl_mutex team->team_lock_key#82 irq_context: 0 rtnl_mutex team->team_lock_key#82 &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#82 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#82 fs_reclaim irq_context: 0 rtnl_mutex team->team_lock_key#82 fs_reclaim mmu_notifier_invalidate_range_start 
irq_context: 0 rtnl_mutex team->team_lock_key#82 netpoll_srcu irq_context: 0 rtnl_mutex team->team_lock_key#82 net_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#82 net_rwsem &list->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#82 &tn->lock irq_context: 0 rtnl_mutex team->team_lock_key#82 _xmit_ETHER irq_context: 0 rtnl_mutex team->team_lock_key#82 &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#82 input_pool.lock irq_context: 0 rtnl_mutex team->team_lock_key#82 &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#82 rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#82 &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#82 nl_table_lock irq_context: 0 rtnl_mutex team->team_lock_key#82 nl_table_wait.lock irq_context: 0 rtnl_mutex team->team_lock_key#82 rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex team->team_lock_key#82 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#82 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex team->team_lock_key#82 &in_dev->mc_tomb_lock irq_context: 0 rtnl_mutex team->team_lock_key#82 &im->lock irq_context: 0 rtnl_mutex team->team_lock_key#82 cbs_list_lock irq_context: 0 rtnl_mutex team->team_lock_key#82 &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#82 sysfs_symlink_target_lock irq_context: 0 rtnl_mutex team->team_lock_key#82 lock irq_context: 0 rtnl_mutex team->team_lock_key#82 lock kernfs_idr_lock irq_context: 0 rtnl_mutex team->team_lock_key#82 &root->kernfs_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#82 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#82 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#82 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#82 &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#82 &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#82 lweventlist_lock &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#82 (console_sem).lock irq_context: 0 rtnl_mutex team->team_lock_key#82 console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex team->team_lock_key#82 console_lock console_srcu console_owner irq_context: 0 rtnl_mutex team->team_lock_key#82 console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex team->team_lock_key#82 console_lock console_srcu console_owner console_owner_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1064 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1064 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1064 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1064 irq_context: 0 rtnl_mutex team->team_lock_key#82 &sem->wait_lock irq_context: 0 rtnl_mutex team->team_lock_key#82 &p->pi_lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1064 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#431 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#431 irq_context: 0 rtnl_mutex team->team_lock_key#82 &cfs_rq->removed.lock irq_context: 0 rtnl_mutex team->team_lock_key#82 pool_lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#82 &____s->seqcount#2 irq_context: 0 rtnl_mutex team->team_lock_key#82 &pcp->lock &zone->lock irq_context: 0 rtnl_mutex team->team_lock_key#82 &____s->seqcount irq_context: 0 rtnl_mutex team->team_lock_key#82 
&pcp->lock &zone->lock &____s->seqcount irq_context: 0 (wq_completion)nfc3_nci_rx_wq#425 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1065 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#423 irq_context: 0 (wq_completion)wg-crypt-wg0#81 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#80 irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1065 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1065 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1065 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1065 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1065 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#80 
(work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = 
(unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#159 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1066 irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } 
while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1100 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1100 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1100 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1100 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1100 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1101 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#143 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#143 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#140 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#140 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#140 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#138 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#144 &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#144 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1101 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1102 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#453 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#453 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#447 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#144 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#141 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#139 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#139 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#445 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#23 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#23 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1102 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#454 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#454 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#454 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#454 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#448 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#446 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1103 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1103 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#449 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#449 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1104 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1105 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#447 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#145 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#145 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#142 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#140 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1105 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1106 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1107 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1107 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#432 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#432 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#426 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#426 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#426 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#424 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 fs_reclaim irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 net_rwsem irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 &tn->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#82 &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1107 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1108 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#456 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_rcv#8 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_send#8 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem k-sk_lock-AF_TIPC fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc31_nci_tx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc31_nci_tx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex nf_hook_mutex remove_cache_srcu quarantine_lock irq_context: 0 rtnl_mutex nf_hook_mutex remove_cache_srcu &c->lock irq_context: 0 rtnl_mutex nf_hook_mutex remove_cache_srcu &n->list_lock irq_context: 0 rtnl_mutex nf_hook_mutex remove_cache_srcu &rq->__lock irq_context: 0 rtnl_mutex nf_hook_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 &sig->cred_guard_mutex &sig->exec_update_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc34_nci_rx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc34_nci_rx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc34_nci_tx_wq#5 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &sb->s_type->i_mutex_key#8 jbd2_handle mmu_notifier_invalidate_range_start &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1066 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1066 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1066 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1067 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1067 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1067 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#132 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#132 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#132 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#129 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#127 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#127 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#127 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#20 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#433 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#433 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci4#6 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)nfc3_nci_tx_wq#425 irq_context: 0 (wq_completion)hci4#6 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&(&hdev->cmd_timer)->work) &rq->__lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci4#6 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_tx_wq#8 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc10_nci_rx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc10_nci_tx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 kn->active#46 &____s->seqcount#2 irq_context: 0 kn->active#46 &pcp->lock &zone->lock irq_context: 0 kn->active#46 &pcp->lock &zone->lock &____s->seqcount irq_context: 0 kn->active#46 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock dev_pm_qos_sysfs_mtx &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem &sem->wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#46 nsim_bus_dev_list_lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#450 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#448 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1108 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1108 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1108 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1108 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1108 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1109 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#457 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#457 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#457 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#451 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#449 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1109 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1109 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1109 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#458 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#458 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#452 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#450 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#459 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#459 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#459 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#459 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#453 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#451 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1111 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1111 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1112 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem &root->kernfs_rwsem &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#454 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#452 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1113 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1113 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1114 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1114 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1114 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1115 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1115 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1115 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1115 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1115 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1115 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1116 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#461 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#461 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#455 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#453 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1116 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1116 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1116 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1116 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1116 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1117 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1117 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1117 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#462 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#462 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#456 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#454 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#454 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#454 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1117 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1117 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1117 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1117 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock &pcp->lock &zone->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1117 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1118 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1118 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1118 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1119 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#463 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#457 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#455 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#455 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#455 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1119 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1119 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1119 irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#1120 irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#464 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1120 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1120 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#464 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#464 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#464 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#458 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1121 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#458 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1121 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#458 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1121 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#456 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#146 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#143 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#141 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#141 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#465 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#465 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#459 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#457 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1122 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1122 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1123 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1123 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1123 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1123 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1123 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1124 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1124 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1124 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#466 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#466 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#466 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#466 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#460 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#458 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1125 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#467 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#467 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#467 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#467 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#461 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1125 irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1125 &rq->__lock irq_context: 0 rcu_read_lock_bh dev->qdisc_tx_busylock ?: &qdisc_tx_busylock _xmit_ETHER#2 rcu_read_lock &____s->seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1125 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#459 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1125 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1125 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1126 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1126 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1126 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1127 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1127 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1127 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1127 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#468 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#468 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#468 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1127 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1128 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#462 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#462 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#460 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1129 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1129 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1129 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1129 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1129 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1130 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#463 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#461 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#147 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#147 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#147 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#147 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#147 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#144 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#144 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#144 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#144 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#144 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1130 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1130 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1130 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1131 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#470 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#470 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#470 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#462 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1131 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1131 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1131 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1131 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1131 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1132 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex uevent_sock_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#471 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#471 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#471 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#471 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#465 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#6 irq_context: 0 wq_pool_mutex &wq->mutex rcu_node_0 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#465 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#465 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#148 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#148 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1132 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1132 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#463 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#463 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#463 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1132 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1133 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#145 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#143 irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex pcpu_lock 
irq_context: 0 cgroup_threadgroup_rwsem freezer_mutex percpu_counters_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#472 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#472 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#472 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#472 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1134 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#466 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#464 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#464 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#464 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1134 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1135 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#473 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#473 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#467 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1136 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1136 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#468 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc20_nci_tx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#468 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#468 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#466 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1137 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1137 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1137 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1137 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1138 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1138 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc21_nci_tx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bond0#82 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem irq_context: 0 (wq_completion)bond0#82 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#82 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#7 irq_context: 0 
(wq_completion)nfc22_nci_tx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc22_nci_tx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc23_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1138 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1139 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1139 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1140 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#475 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#475 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#469 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#469 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#469 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#467 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#467 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#467 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1140 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#146 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1140 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#144 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1141 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1067 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1067 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1067 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1067 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1068 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#133 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#133 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#133 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#133 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#130 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#128 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#21 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#21 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#21 irq_context: 0 
(wq_completion)nfc5_nci_rx_wq#21 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#21 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1141 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1141 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1141 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1142 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1142 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1142 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1142 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1142 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1142 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1142 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1143 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1144 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1144 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1144 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1145 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#147 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#147 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#147 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#145 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1145 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1145 irq_context: 0 nfc_devlist_mutex uevent_sock_mutex &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#434 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#434 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#428 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#426 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1146 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#476 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#470 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#468 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#15 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#15 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock &n->list_lock 
&c->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc6_nci_tx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&peer->transmit_packet_work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &n->list_lock irq_context: 0 (wq_completion)nfc8_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_tx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &rq->__lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#11 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->tx_work) &list->lock#5 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci0#12 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#7 
irq_context: 0 (wq_completion)nfc15_nci_rx_wq#7 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)nfc15_nci_tx_wq#7 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1169 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1169 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_rx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc39_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc39_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc39_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc38_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc38_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc38_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc38_nci_tx_wq#3 irq_context: 0 (wq_completion)nfc37_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc36_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc36_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc36_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc35_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc35_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc35_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc33_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc31_nci_rx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc31_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc30_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#5 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc28_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#5 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#5 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem rtnl_mutex &rdev->wiphy.mtx (wq_completion)phy164 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#5 rcu_node_0 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#5 &rcu_state.expedited_wq irq_context: 0 (wq_completion)nfc27_nci_tx_wq#5 &rcu_state.expedited_wq &p->pi_lock irq_context: 0 (wq_completion)nfc27_nci_tx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc27_nci_tx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1169 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1169 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1169 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1169 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1169 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#489 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#489 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#483 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#481 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#154 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#22 irq_context: 0 
(wq_completion)nfc5_nci_rx_wq#22 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#22 irq_context: 0 &dev->mutex uevent_sock_mutex remove_cache_srcu &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#435 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#435 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#435 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#435 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#429 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#429 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#429 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#427 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#134 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#134 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#134 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#134 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#131 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#131 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#131 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#129 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1068 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1068 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1068 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1068 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1068 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1068 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1068 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1069 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc19_nci_rx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#154 &rq->__lock irq_context: 0 (wq_completion)nfc19_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#154 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 devlinks.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &xa->xa_lock#19 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#82 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 
nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex rtnl_mutex.wait_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rtnl_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock &tb->tb6_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock &tb->tb6_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock &tb->tb6_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock &rcu_state.gp_wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#82 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#154 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#151 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#151 &rq->__lock 
irq_context: 0 (wq_completion)nfc4_nci_rx_wq#151 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#149 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1170 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1171 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1171 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1171 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#483 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#483 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#483 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &nf_conntrack_locks[i] &nf_conntrack_locks[i]/1 batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#164 irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 
irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &sem->wait_lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#161 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &sem->wait_lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#163 
(work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) 
((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock 
&____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-kex-wg1#162 irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) 
*)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } 
while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) 
*)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override rcu_node_0 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1212 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1212 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1212 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#160 irq_context: 0 (wq_completion)wg-kex-wg2#160 &rq->__lock irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#1212 irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1212 irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1212 &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock crngs.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1212 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &cookie->lock irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1213 irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 
0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &wg->static_identity.lock &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#160 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#161 irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned 
long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &wg->static_identity.lock &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); 
}); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg2#161 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock 
&p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#82 irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); 
(typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock 
rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#528 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#528 irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) &r->consumer_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1171 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1171 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1171 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#492 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#492 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1172 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#486 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#484 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#484 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#484 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 pernet_ops_rwsem rtnl_mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &____s->seqcount#9 irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 
(wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock rcu_read_lock_bh &list->lock#12 irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) batched_entropy_u8.lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#522 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#520 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock &____s->seqcount#2 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock &____s->seqcount irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1213 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1213 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1214 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#170 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#170 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#170 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#167 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#165 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1214 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1214 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1214 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1214 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1214 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1215 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1215 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1215 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1215 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1215 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1215 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#529 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1172 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1172 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1172 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1173 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1173 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1216 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#529 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#523 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#521 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1216 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1216 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1216 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#493 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#493 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#493 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#487 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#485 irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#485 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1216 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1216 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1216 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1217 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#530 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#524 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#524 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#524 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#522 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#171 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#171 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1217 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1217 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1217 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#168 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#166 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1217 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1218 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#531 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#531 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#531 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#525 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#525 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#525 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#523 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#523 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#523 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#532 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#526 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#524 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1218 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1218 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1218 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1218 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1218 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#533 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#533 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#533 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#533 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#527 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#525 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#525 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#525 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#494 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#494 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#494 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1173 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1173 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1173 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1174 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1174 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1174 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1219 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#494 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#488 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#486 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#486 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#486 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#155 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#155 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#152 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#150 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1219 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1219 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1220 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1220 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#534 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#495 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#495 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#489 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#489 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#489 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#487 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1174 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1174 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1174 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1174 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1174 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#534 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#534 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#528 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#526 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#173 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#173 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1175 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#156 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#156 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#156 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#156 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#153 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#153 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#153 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#151 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#170 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#535 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#535 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#535 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#535 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#25 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#25 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#25 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#24 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#17 irq_context: 0 rtnl_mutex remove_cache_srcu &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1175 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1175 &rq->__lock irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#1175 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1175 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1176 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#529 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1220 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1220 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1220 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#527 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1221 irq_context: 0 misc_mtx (wq_completion)nfc5_nci_tx_wq#31 irq_context: 0 misc_mtx (wq_completion)nfc5_nci_rx_wq#31 irq_context: 0 misc_mtx (wq_completion)nfc5_nci_cmd_wq#33 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#174 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#171 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#169 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1221 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1221 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1221 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1221 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1222 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#175 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#175 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#536 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#536 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#172 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#530 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#528 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#170 irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) &base->lock irq_context: 0 (wq_completion)wg-crypt-wg2#80 (work_completion)(&peer->transmit_packet_work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1222 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1222 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1222 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1222 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1223 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1223 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1223 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1223 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1223 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1223 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1223 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1223 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)bond0#83 irq_context: 0 (wq_completion)bond0#83 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#83 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#83 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#83 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1176 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1176 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1176 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1176 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1176 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1177 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#496 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#496 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#32 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#26 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#496 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#26 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#26 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#26 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#25 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#490 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#488 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#157 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#157 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#17 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#154 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#25 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#152 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#152 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#152 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#16 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc7_nci_tx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#18 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1224 irq_context: 0 &dev->mutex uevent_sock_mutex nl_table_wait.lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#32 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#537 irq_context: 0 rtnl_mutex team->team_lock_key#83 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex team->team_lock_key#83 netpoll_srcu irq_context: 0 rtnl_mutex team->team_lock_key#83 net_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#83 net_rwsem &list->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#83 &tn->lock irq_context: 0 rtnl_mutex team->team_lock_key#83 _xmit_ETHER irq_context: 0 rtnl_mutex team->team_lock_key#83 &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#83 input_pool.lock irq_context: 0 rtnl_mutex team->team_lock_key#83 rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#83 &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#83 nl_table_lock irq_context: 0 rtnl_mutex team->team_lock_key#83 nl_table_wait.lock irq_context: 0 rtnl_mutex team->team_lock_key#83 rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex team->team_lock_key#83 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#83 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex team->team_lock_key#83 &in_dev->mc_tomb_lock irq_context: 0 rtnl_mutex team->team_lock_key#83 &im->lock irq_context: 0 rtnl_mutex team->team_lock_key#83 _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#83 _xmit_ETHER &n->list_lock irq_context: 0 rtnl_mutex 
team->team_lock_key#83 _xmit_ETHER &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#83 cbs_list_lock irq_context: 0 rtnl_mutex team->team_lock_key#83 &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#83 sysfs_symlink_target_lock irq_context: 0 rtnl_mutex team->team_lock_key#83 lock irq_context: 0 rtnl_mutex team->team_lock_key#83 lock kernfs_idr_lock irq_context: 0 rtnl_mutex team->team_lock_key#83 &root->kernfs_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#83 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#83 &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#83 &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#83 &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#83 &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#83 (console_sem).lock irq_context: 0 rtnl_mutex team->team_lock_key#83 console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex team->team_lock_key#83 console_lock console_srcu console_owner irq_context: 0 rtnl_mutex team->team_lock_key#83 console_lock console_srcu console_owner &port_lock_key irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1177 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1177 irq_context: 0 rtnl_mutex team->team_lock_key#83 console_lock console_srcu console_owner console_owner_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#537 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#537 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#537 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#531 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#529 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1224 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#538 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#538 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#538 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1224 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1225 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1225 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1225 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#532 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#530 irq_context: 0 (wq_completion)events (work_completion)(&data->fib_event_work) &data->fib_lock sched_map-wait-type-override &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#176 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#176 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#176 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#176 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#173 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#171 irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu fill_pool_map-wait-type-override &rq->__lock irq_context: 0 &mm->mmap_lock &mm->mmap_lock/1 remove_cache_srcu fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1225 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1225 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1225 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1226 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1226 &rq->__lock irq_context: 0 
&ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1226 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1226 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1226 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1226 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1226 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1226 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1226 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1227 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1227 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1227 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#540 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#534 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#534 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#534 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1227 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1227 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1227 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1177 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1178 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1228 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1228 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#534 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#534 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#532 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#532 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#532 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1229 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1229 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#497 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#497 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1229 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#491 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#489 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1229 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#177 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#177 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#541 irq_context: 0 rtnl_mutex team->team_lock_key#83 lweventlist_lock irq_context: 0 rtnl_mutex team->team_lock_key#83 lweventlist_lock &dir->lock#2 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#541 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#177 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#535 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#533 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1178 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_rcv#9 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_rcv#9 &rq->__lock irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_rcv#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1179 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_send#9 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1179 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1179 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1180 irq_context: 0 
&ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1180 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1180 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1180 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_crypto#9 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1180 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1180 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1181 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#498 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#498 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#498 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#492 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#490 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1181 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#499 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#499 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#493 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#491 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1182 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1182 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1182 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1182 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1182 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1182 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1183 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#500 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#500 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#500 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#494 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#494 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#494 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#492 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#492 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#155 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#153 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1183 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1183 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1183 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1184 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#495 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#493 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1184 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1184 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1184 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1185 irq_context: 0 (wq_completion)bond0#83 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#83 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#502 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1185 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1185 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1185 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#496 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1186 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#494 irq_context: 0 (wq_completion)hci0#12 (work_completion)(&(&hdev->cmd_timer)->work) irq_context: 0 (wq_completion)hci0#12 (work_completion)(&(&hdev->cmd_timer)->work) (console_sem).lock 
irq_context: 0 (wq_completion)hci0#12 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci0#12 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci0#12 (work_completion)(&(&hdev->cmd_timer)->work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci0#12 (work_completion)(&(&hdev->cmd_timer)->work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1186 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 fs_reclaim irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 net_rwsem irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#83 &tn->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex rcu_state.exp_mutex pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1186 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1187 irq_context: 0 &dev->mutex &root->kernfs_rwsem &rcu_state.expedited_wq irq_context: 0 &dev->mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock irq_context: 0 &dev->mutex &root->kernfs_rwsem &rcu_state.expedited_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1187 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1187 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1187 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1187 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1187 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1187 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1188 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#533 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#533 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#174 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1229 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#172 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1230 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#35 
irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#35 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#35 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#35 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#33 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#33 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1231 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1231 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1231 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#178 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#178 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#178 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#175 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#173 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#542 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#542 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#542 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#179 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#179 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#179 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#179 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#176 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#174 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#174 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#174 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#542 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#536 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#534 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1231 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1231 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1231 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1231 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1231 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1231 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1232 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#34 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#34 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#543 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#543 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#537 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#535 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#535 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#535 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1232 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1232 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1232 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1232 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1232 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#180 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#180 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#180 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1233 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1233 &rq->__lock irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#1233 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#180 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#177 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1233 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1233 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#175 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#544 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#544 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#544 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#538 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1233 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1233 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1233 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#536 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#536 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#536 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1233 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1233 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1233 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1234 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1234 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1234 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1234 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1234 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1234 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1234 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1234 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1235 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#37 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#35 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#35 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#20 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#20 irq_context: 0 misc_mtx (wq_completion)nfc5_nci_cmd_wq#38 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1235 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1235 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1235 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1235 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1235 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1236 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1236 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1236 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#181 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#181 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#40 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#40 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#36 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#178 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#36 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#176 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1236 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1236 
irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1236 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#21 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#21 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#545 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#545 irq_context: 0 wq_pool_mutex &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#21 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#21 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_tx_wq#21 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#21 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &type->i_mutex_dir_key#4 rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1237 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1237 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1237 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#539 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1237 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1237 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1237 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1237 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1237 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1237 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1238 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1238 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#546 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#546 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#182 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#182 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#179 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#179 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#179 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#540 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#538 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#177 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1239 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1239 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1239 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1239 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1240 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1240 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1240 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1240 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1240 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1240 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1241 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#547 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1241 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1241 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1241 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1242 
irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#547 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#541 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#541 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#541 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#539 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#183 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#183 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#180 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#178 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1242 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1242 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1242 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem fill_pool_map-wait-type-override &c->lock irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->xattr_sem fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1242 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1242 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1242 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1188 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#503 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#503 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#497 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#495 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#495 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#495 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1242 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#548 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#548 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#548 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#542 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#540 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1243 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#549 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#543 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#541 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#541 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#184 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#184 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#181 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#179 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#41 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#41 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1243 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1243 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1243 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1243 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1188 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1189 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1243 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1243 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1244 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#550 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#550 irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#544 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#544 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#544 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#542 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#542 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#542 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#159 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#159 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#156 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#154 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#185 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#185 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#182 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#182 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#180 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1244 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1244 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1245 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1245 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#551 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#551 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1189 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1189 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1189 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1189 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1190 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#551 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#551 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#545 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#545 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#545 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1245 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1245 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1245 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#543 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1245 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1245 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1246 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1246 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1246 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#186 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#186 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#183 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#181 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#181 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1246 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1246 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1246 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_tx_wq#496 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_tx_wq#496 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_tx_wq#496 &rq->__lock &cfs_rq->removed.lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_tx_wq#496 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc3_nci_tx_wq#496 &cfs_rq->removed.lock irq_context: 0 misc_mtx 
(wq_completion)nfc3_nci_tx_wq#496 &obj_hash[i].lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_tx_wq#496 pool_lock#2 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#498 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#504 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1246 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#552 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#552 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#546 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#546 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#546 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#544 irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1247 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1190 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1191 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1247 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1247 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1247 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#505 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#499 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#497 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1247 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1248 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1191 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1191 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1191 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1192 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#553 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#553 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#553 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#545 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#187 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#187 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1192 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1192 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1192 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1192 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1192 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1193 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#506 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#506 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#506 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#506 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#500 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#184 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#1248 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1194 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#507 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#507 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#501 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#499 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#499 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#499 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1194 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1194 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1194 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#160 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1248 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#182 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1248 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1249 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#42 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#38 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#38 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#554 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#548 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#546 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#546 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#546 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1249 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1249 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1249 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1250 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1250 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1250 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#188 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#188 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#185 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1250 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1250 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#183 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1250 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1250 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1250 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#43 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#43 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#22 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#22 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#22 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#22 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#22 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#22 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#39 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#22 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#39 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1266 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1266 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1266 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1267 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1267 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1267 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1267 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1267 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1268 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#566 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#560 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#560 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#560 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#558 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1268 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1268 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1268 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#190 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#190 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#188 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#23 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#23 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#23 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_tx_wq#23 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1270 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#567 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#567 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#561 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#559 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#44 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#44 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#44 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#44 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#44 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#44 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#40 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#40 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1271 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#568 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#568 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#562 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#562 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#562 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1271 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1271 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1271 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1271 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1271 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1272 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1272 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1272 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1272 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1272 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1272 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1272 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1272 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1273 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1273 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#569 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#569 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#569 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#569 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#563 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#561 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1274 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1274 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1274 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1275 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1275 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1276 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1276 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1276 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#570 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#570 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#564 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#562 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1276 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1277 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1277 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1277 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1277 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#565 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#563 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1277 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1277 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1278 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#194 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#194 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#194 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#194 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#191 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#189 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1278 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1278 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1279 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#564 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#195 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#195 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#195 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1279 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1279 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#195 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#192 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#190 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1279 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1279 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1280 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#573 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#573 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#573 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#573 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#567 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#567 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#567 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#565 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1280 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1280 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1281 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1281 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1281 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1281 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1281 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1281 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1282 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#574 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#574 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#163 (work_completion)(&peer->transmit_handshake_work) &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg1#162 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &list->lock#14 irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh &r->producer_lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long 
__ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh tk_core.seq.seqcount irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1282 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1282 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#574 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#568 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#566 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1282 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1283 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#575 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#575 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#575 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#575 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#569 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#567 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1283 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1283 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1284 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1284 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1284 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1284 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1284 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1284 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1284 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1284 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1285 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#576 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#576 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#576 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#576 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#570 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#568 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1285 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1285 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1285 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1286 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1286 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1287 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1287 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#1287 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1288 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#577 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#577 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#571 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#571 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#571 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#569 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#569 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#569 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1288 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1288 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1288 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1288 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1288 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1288 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1288 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1289 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#578 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#578 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#578 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#578 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#570 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1289 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1289 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1289 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1289 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1289 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1290 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#579 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#579 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#579 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#579 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#573 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#573 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#573 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#571 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1290 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1290 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1290 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1291 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1291 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1291 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#196 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#196 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#196 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#193 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#191 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#580 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#580 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#580 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq 
irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#580 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#574 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#572 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#581 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#581 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#581 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1291 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1291 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1291 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1291 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1291 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1291 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1291 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1292 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1292 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1292 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#582 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#582 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#582 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#582 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1292 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1292 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1292 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#576 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1292 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#573 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1292 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1292 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1292 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1293 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#197 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#194 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#192 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#192 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#192 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1293 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1293 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#49 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#49 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#45 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#45 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1315 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1315 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1315 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1315 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1315 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1315 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#600 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#600 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#600 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1316 
irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1316 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1316 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#600 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#593 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1316 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1316 irq_context: 0 (wq_completion)wg-crypt-wg1#70 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1317 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#590 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#208 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#208 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#208 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#205 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#203 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1317 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1317 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1317 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1317 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1317 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1318 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#601 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#601 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#594 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#591 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#591 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#591 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1318 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1318 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1318 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1318 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1318 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1318 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1318 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1319 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1319 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1319 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#602 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#602 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#602 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#602 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#595 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#592 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1319 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1319 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1319 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1320 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#603 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#209 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#603 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#603 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#209 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#209 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1320 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1320 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1320 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1321 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1321 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#209 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#206 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#204 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#593 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1321 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1321 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1321 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1321 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1321 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1322 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#604 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#597 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#594 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#594 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#605 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1322 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1322 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1322 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1322 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1322 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#605 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1323 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#598 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#598 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#595 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1323 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1323 irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock batched_entropy_u8.lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock kfence_freelist_lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#83 &nsim_trap_data->trap_lock &meta->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1323 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1324 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1324 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1324 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1324 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1324 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1324 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1324 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1325 irq_context: 0 
(wq_completion)nfc3_nci_cmd_wq#606 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#599 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1325 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1325 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#596 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#210 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#210 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1326 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#205 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1326 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1326 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1326 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1326 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1327 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#607 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#607 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#597 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1327 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1327 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1327 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1328 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#608 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#608 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#608 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#601 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#598 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1328 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1328 irq_context: 0 sb_writers#3 sb_internal jbd2_handle rcu_read_lock &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1329 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1329 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#609 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#609 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#609 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#602 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#599 irq_context: 0 misc_mtx (wq_completion)nfc2_nci_rx_wq#1329 irq_context: 0 misc_mtx (wq_completion)nfc2_nci_cmd_wq#1329 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#211 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#208 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#206 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1330 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1330 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1330 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1330 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1330 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1330 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#610 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#610 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#603 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#600 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#212 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#212 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#209 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#207 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#207 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#207 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1331 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#611 irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#604 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1331 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1331 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1331 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#210 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#601 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#208 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1332 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1332 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1332 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#612 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#612 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#612 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#612 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#605 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#605 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#605 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#602 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#602 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#602 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#214 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#211 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#209 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1333 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1333 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1333 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1333 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1333 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1333 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#613 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#613 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#606 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#606 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#603 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#614 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#614 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#614 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#614 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#614 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1335 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1335 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1334 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1334 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#215 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#215 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#210 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#50 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#50 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#46 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#46 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#46 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#607 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#604 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#46 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#25 irq_context: 0 
&ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#25 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#25 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_rx_wq#25 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#25 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1336 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1336 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1336 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1335 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1335 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#615 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#615 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#608 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1337 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1337 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1336 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1336 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#216 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#216 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#216 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#213 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#213 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#211 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#605 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#211 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#211 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1338 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1338 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1337 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1337 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1339 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1338 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1338 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#616 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#609 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#606 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#606 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#606 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#217 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#217 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#214 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#214 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#214 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#212 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#212 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#212 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1340 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1340 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1339 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#51 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#51 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#51 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#51 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#51 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#51 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#47 
irq_context: 0 (wq_completion)nfc5_nci_rx_wq#47 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#47 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#47 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#47 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#47 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock irq_context: 0 (wq_completion)wg-crypt-wg1#74 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock crngs.lock base_crng.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1341 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1341 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1341 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1341 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1340 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1340 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc10_nci_rx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_rx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc29_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc29_nci_rx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#6 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc29_nci_rx_wq#6 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc29_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#10 irq_context: 0 
(wq_completion)nfc30_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc30_nci_rx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc30_nci_tx_wq#6 irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &table->lock#2 irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock rcu_read_lock_bh &peer->keypairs.keypair_update_lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg0#164 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc35_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc35_nci_tx_wq#5 &rq->__lock irq_context: 0 (wq_completion)nfc35_nci_tx_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc35_nci_tx_wq#5 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc37_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc37_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc37_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#617 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#617 irq_context: 0 (wq_completion)nfc37_nci_cmd_wq#6 irq_context: 0 
(wq_completion)nfc3_nci_rx_wq#610 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#610 &rq->__lock irq_context: 0 (wq_completion)nfc37_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#610 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc37_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#607 irq_context: 0 (wq_completion)nfc37_nci_tx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc37_nci_tx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1342 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1342 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1342 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1341 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1341 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1341 irq_context: 0 (wq_completion)nfc36_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc36_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#218 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#218 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#218 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#215 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#213 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#213 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#52 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#52 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#52 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#52 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#48 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#48 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#48 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#48 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#26 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#26 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#26 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#26 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#26 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc30_nci_rx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#17 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#11 &rq->__lock irq_context: 0 
(wq_completion)nfc10_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc9_nci_tx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#219 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#219 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#219 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#219 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#216 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#214 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc14_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#53 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#53 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#49 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#49 &rq->__lock irq_context: 0 
(wq_completion)nfc19_nci_rx_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#49 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc19_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#49 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#49 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#49 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#19 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#19 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc22_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc23_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_tx_wq#8 irq_context: 0 
(wq_completion)nfc26_nci_tx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_tx_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#618 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#618 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#611 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#608 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#608 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1343 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1343 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1342 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#27 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#27 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#27 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#27 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#27 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_tx_wq#27 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#220 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#220 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#217 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#215 irq_context: 0 (wq_completion)wg-crypt-wg0#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#54 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#54 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#54 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#54 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#50 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#50 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#19 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_tx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock 
(wq_completion)nfc12_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc12_nci_rx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc15_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc19_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc38_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc38_nci_cmd_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc38_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc38_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc38_nci_tx_wq#4 irq_context: 0 (wq_completion)nfc38_nci_tx_wq#4 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc34_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc34_nci_tx_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#6 irq_context: 0 
(wq_completion)nfc33_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc33_nci_tx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#20 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#55 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#55 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#51 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#51 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#51 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#28 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#28 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#28 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#221 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#221 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#218 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#216 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#216 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1344 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1344 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1344 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1343 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1343 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1343 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#619 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#619 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#619 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#619 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#612 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#612 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#612 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#609 
irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1345 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1345 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1344 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#620 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#620 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#613 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1344 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#610 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1345 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1345 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#621 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#614 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1347 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#611 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1347 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1347 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1347 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1346 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1346 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#222 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#222 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#219 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#217 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#622 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#622 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#622 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#615 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#615 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#615 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#612 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#223 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#223 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#220 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#218 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1348 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1348 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1347 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1347 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#616 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#613 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#224 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#224 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#221 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#219 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1349 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1349 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1349 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1348 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1348 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#624 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#624 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#624 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#624 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#617 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#614 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1350 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1350 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1349 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1349 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#225 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#225 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#222 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1349 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#220 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#625 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#618 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#615 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#615 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#615 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#56 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#56 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#56 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#56 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#52 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#52 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1351 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1351 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1350 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#626 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#626 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#619 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#616 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#627 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#627 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#620 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#620 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#620 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#221 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_rx_wq#621 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#628 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#57 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#57 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#53 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1352 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1352 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1351 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1351 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1353 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1353 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1353 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1352 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1352 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1352 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1352 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#629 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#629 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#629 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#629 
irq_context: 0 (wq_completion)nfc3_nci_rx_wq#622 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#618 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1354 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1354 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1353 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1353 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1353 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1353 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1355 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1355 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1354 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1354 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1354 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1354 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#630 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#623 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#619 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#619 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#619 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#227 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#227 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#227 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#227 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#224 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#224 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#222 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#222 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#222 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1356 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1356 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1356 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1356 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1356 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1355 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1355 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1355 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1355 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#620 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1357 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1357 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1357 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#632 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#632 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#625 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#625 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#625 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#621 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1356 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1356 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1356 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1358 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 jbd2_handle &ei->i_data_sem key#3 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1357 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#1357 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1357 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1357 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#633 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#633 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#626 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#626 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#626 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#622 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#225 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#223 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1359 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#634 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1359 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1359 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1359 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1358 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1358 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1358 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#627 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#623 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1358 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#635 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#635 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#635 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#628 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#624 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1360 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1360 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1360 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1360 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1359 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1359 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1361 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1361 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1360 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1360 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1360 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1360 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#636 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#636 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#636 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#636 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#629 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#625 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1362 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1362 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1362 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1362 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1361 irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex &cfs_rq->removed.lock irq_context: 0 &dev->master_mutex &dev->mode_config.idr_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1361 
irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#637 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#637 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#229 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#229 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#637 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#630 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#226 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#224 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#626 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1363 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1362 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1362 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1362 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1362 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#638 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#638 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1364 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#631 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1364 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#627 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1364 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1363 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1363 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1363 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &f->f_pos_lock &mm->mmap_lock &cfs_rq->removed.lock irq_context: 0 &f->f_pos_lock &mm->mmap_lock &obj_hash[i].lock irq_context: 0 &f->f_pos_lock &mm->mmap_lock pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1363 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1363 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1363 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1363 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1365 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1365 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1364 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1364 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1364 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1364 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1364 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1364 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#225 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#639 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1366 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1366 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#632 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#632 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#632 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#632 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#632 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#632 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#628 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1365 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1365 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#231 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock 
(wq_completion)nfc4_nci_cmd_wq#231 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#231 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#228 irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh batched_entropy_u32.lock irq_context: 0 (wq_completion)wg-crypt-wg1#81 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#226 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#640 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#640 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#640 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#633 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#629 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1367 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1366 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1366 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1367 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1367 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#641 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#641 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#641 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#634 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#630 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#630 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#630 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1369 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1369 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1368 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1368 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1370 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1370 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1370 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1369 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1369 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1369 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1369 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#232 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#232 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#229 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#227 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#227 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#227 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#642 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#642 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#635 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#631 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#58 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#58 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#58 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#58 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#54 
irq_context: 0 (wq_completion)nfc5_nci_tx_wq#54 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#54 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#54 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1371 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1371 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1370 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1370 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#643 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#643 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#636 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#632 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#632 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#632 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1372 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1372 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1372 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1371 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1371 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1371 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1371 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#644 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#644 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#644 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#644 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#637 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#637 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#637 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#633 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#633 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#633 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1373 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1373 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#645 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#645 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#645 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1373 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1373 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1373 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1372 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1372 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#233 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#645 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#638 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#634 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#233 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#230 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#228 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#228 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#228 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1373 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1373 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1373 &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1373 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#239 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#239 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1405 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1405 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1405 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1404 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1404 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1404 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1404 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#666 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#666 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#666 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#666 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#659 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#655 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1406 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1406 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1405 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1405 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1405 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1405 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#667 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#667 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#667 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#667 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#660 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#656 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#656 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#656 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#245 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#242 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#240 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#61 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#61 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#61 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#56 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#21 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#21 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#56 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#14 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#14 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#14 &rq->__lock irq_context: 0 
(wq_completion)nfc12_nci_rx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_tx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#14 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_rx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_tx_wq#15 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_tx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc19_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&rfkill->sync_work) rfkill_global_mutex &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc13_nci_tx_wq#16 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_tx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc24_nci_tx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#9 
irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc28_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc28_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1407 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1407 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1406 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1406 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1406 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#668 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#668 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#661 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#657 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#29 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#29 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#29 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#29 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#29 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#29 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#29 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#246 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#246 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#246 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#246 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#243 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#243 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#241 irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rcu_read_lock rcu_node_0 irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rcu_read_lock &rq->__lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#15 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#17 &rq->__lock irq_context: 0 (wq_completion)nfc9_nci_rx_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#62 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#62 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#57 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#57 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#22 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#22 &rq->__lock irq_context: 0 (wq_completion)nfc7_nci_rx_wq#22 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc7_nci_tx_wq#22 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#15 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc10_nci_tx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc12_nci_rx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc15_nci_rx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc23_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#8 
irq_context: 0 (wq_completion)nfc25_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#82 (work_completion)(&peer->transmit_packet_work) &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc26_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc34_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc31_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#8 irq_context: 0 &u->iolock &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc30_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1408 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1407 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1407 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#669 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#669 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#669 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#669 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#662 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#658 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1409 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1409 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1409 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#670 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#670 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#670 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#670 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1408 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#663 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#663 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#663 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#659 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#659 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#659 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1408 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1410 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1410 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1409 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#672 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#672 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#665 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#661 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1411 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1411 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1411 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1410 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1410 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#673 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1412 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1412 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#673 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#666 irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 sk_lock-AF_INET fill_pool_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1411 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#662 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1411 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#662 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#662 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1413 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1413 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1413 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1413 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1413 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1412 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1412 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1412 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1412 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1414 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1414 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1414 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1413 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1413 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1413 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1413 irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#1413 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1413 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#674 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#674 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#674 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#674 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#667 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#663 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1415 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1415 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1415 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#675 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#675 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#675 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1415 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1414 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1414 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1414 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#675 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#668 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#664 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#676 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#676 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#676 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#669 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1416 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1416 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#665 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1415 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1415 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1415 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1415 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#247 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#247 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#244 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#242 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#242 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1417 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1417 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1416 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1416 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#248 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#248 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#248 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#245 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#243 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1417 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1417 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1417 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#63 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#63 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#63 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#63 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#58 
irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1417 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#58 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#58 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#58 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#677 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#677 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#677 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#249 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#249 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#677 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#670 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#670 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#670 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#666 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#246 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#244 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1419 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1419 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1419 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1418 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1418 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1418 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1418 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#678 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#678 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#678 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#671 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#671 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#671 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#667 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#667 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#667 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1420 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1420 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1420 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1420 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1419 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1419 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1419 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1421 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1421 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1421 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1420 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1420 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1420 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1420 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1420 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1420 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#679 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#679 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc3_nci_cmd_wq#679 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#672 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#672 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#668 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 sb_internal jbd2_handle &obj_hash[i].lock pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1422 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1422 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1421 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1421 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1421 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1421 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#680 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#680 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#680 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#673 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#673 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#673 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#669 irq_context: 0 nfc_devlist_mutex deferred_probe_mutex &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1423 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1423 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1422 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1422 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1422 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1422 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1422 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1424 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1424 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1423 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_rx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_tx_wq#16 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1423 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1423 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1423 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1424 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1424 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1424 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_tx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1426 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1426 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1426 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1426 &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#19 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#19 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1425 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1425 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1425 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1425 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#19 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#19 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc15_nci_tx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc15_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#8 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc20_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#8 irq_context: 0 misc_mtx (wq_completion)nfc21_nci_tx_wq#9 irq_context: 0 misc_mtx (wq_completion)nfc21_nci_rx_wq#9 irq_context: 0 misc_mtx (wq_completion)nfc21_nci_rx_wq#9 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc21_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc21_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc23_nci_rx_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc23_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#670 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#250 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#250 irq_context: 0 
(wq_completion)nfc4_nci_rx_wq#247 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#245 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#245 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#245 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#64 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#64 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#64 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#64 irq_context: 0 &u->iolock rcu_read_lock &cfs_rq->removed.lock irq_context: 0 &u->iolock rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#59 irq_context: 0 &u->iolock rcu_read_lock pool_lock#2 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#59 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#30 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#30 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#30 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#30 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#24 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#24 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#24 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#23 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#23 &rq->__lock irq_context: 0 (wq_completion)nfc7_nci_rx_wq#23 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_tx_wq#23 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#18 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#17 &rq->__lock irq_context: 0 (wq_completion)nfc8_nci_rx_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc12_nci_rx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#13 irq_context: 0 
(wq_completion)nfc14_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1427 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1427 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1427 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1427 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1426 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1426 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_tx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc20_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc20_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc21_nci_rx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc21_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#11 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex triggers_list_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc22_nci_tx_wq#11 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex triggers_list_lock &obj_hash[i].lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex triggers_list_lock pool_lock#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#9 irq_context: 0 
(wq_completion)nfc25_nci_tx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc25_nci_tx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc23_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc42_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc42_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc42_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#682 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#682 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#675 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#671 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#671 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#671 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#251 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#251 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#251 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#251 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#248 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#246 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#246 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#65 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#65 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#60 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#60 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#31 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#31 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#31 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#31 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#25 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#25 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#24 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#24 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#19 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#19 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#19 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#17 irq_context: 0 
(wq_completion)nfc10_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_rx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc14_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1428 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1428 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1427 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1427 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#21 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#21 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_rx_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_tx_wq#21 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#13 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc50_nci_cmd_wq irq_context: 0 (wq_completion)nfc50_nci_cmd_wq irq_context: 0 (wq_completion)nfc50_nci_rx_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc49_nci_cmd_wq irq_context: 0 (wq_completion)nfc49_nci_cmd_wq irq_context: 0 (wq_completion)nfc49_nci_rx_wq irq_context: 0 (wq_completion)nfc49_nci_tx_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc48_nci_cmd_wq irq_context: 0 (wq_completion)nfc48_nci_cmd_wq irq_context: 0 (wq_completion)nfc48_nci_rx_wq irq_context: 0 (wq_completion)nfc48_nci_tx_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc47_nci_cmd_wq irq_context: 0 (wq_completion)nfc47_nci_cmd_wq irq_context: 0 (wq_completion)nfc47_nci_rx_wq irq_context: 0 (wq_completion)nfc47_nci_tx_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc46_nci_cmd_wq irq_context: 0 (wq_completion)nfc46_nci_cmd_wq irq_context: 0 (wq_completion)nfc46_nci_rx_wq irq_context: 0 (wq_completion)nfc46_nci_tx_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc45_nci_cmd_wq irq_context: 0 (wq_completion)nfc45_nci_cmd_wq irq_context: 0 (wq_completion)nfc45_nci_rx_wq irq_context: 0 (wq_completion)nfc45_nci_tx_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc44_nci_cmd_wq 
irq_context: 0 (wq_completion)nfc44_nci_cmd_wq irq_context: 0 (wq_completion)nfc44_nci_rx_wq irq_context: 0 (wq_completion)nfc44_nci_tx_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc43_nci_cmd_wq irq_context: 0 (wq_completion)nfc43_nci_cmd_wq irq_context: 0 (wq_completion)nfc43_nci_rx_wq irq_context: 0 (wq_completion)nfc43_nci_tx_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc41_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc41_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc41_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc41_nci_tx_wq#2 irq_context: 0 (wq_completion)nfc40_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc40_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc39_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc39_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc39_nci_rx_wq#4 &rq->__lock irq_context: 0 (wq_completion)nfc39_nci_rx_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc39_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc38_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc38_nci_cmd_wq#5 irq_context: 0 (wq_completion)nfc38_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc38_nci_tx_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc37_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc37_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc37_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc37_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc37_nci_tx_wq#7 &rq->__lock irq_context: 0 (wq_completion)nfc37_nci_tx_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc36_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc36_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc36_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc35_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc35_nci_tx_wq#6 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc34_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc34_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc34_nci_tx_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#8 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#8 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc33_nci_tx_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc31_nci_rx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc31_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#9 irq_context: 0 
(wq_completion)nfc30_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc30_nci_tx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#247 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#66 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#66 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#66 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#683 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1429 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#683 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#683 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1428 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1428 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1428 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1428 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#66 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#683 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#61 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#676 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#676 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#676 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#253 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#253 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#253 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#61 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#61 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#61 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#253 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#250 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#672 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#248 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1430 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1430 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1430 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1429 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1429 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1429 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1429 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#684 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#684 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#677 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#673 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#673 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#673 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#251 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#251 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#251 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#249 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#249 &rq->__lock irq_context: 0 
(wq_completion)nfc4_nci_tx_wq#249 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#67 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#67 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#67 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#67 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#62 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#62 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#62 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#62 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1431 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1431 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1431 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#685 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#685 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1431 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#678 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1430 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1430 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1430 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1430 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1430 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#674 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#32 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#32 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#32 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#32 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#32 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#255 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#255 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#252 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#250 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1432 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1432 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1432 &rq->__lock cpu_asid_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1432 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1431 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1431 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1431 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1433 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1433 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1432 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1432 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1432 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1432 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1432 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1432 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#686 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#686 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#686 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#686 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#679 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#679 
&rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#679 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#675 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#675 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#675 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1434 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1434 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1433 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1433 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1433 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#687 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#687 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#687 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#687 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#680 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#680 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#680 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1435 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1435 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1435 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#676 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1435 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1434 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1434 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1434 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1434 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1434 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1436 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1435 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#688 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1435 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1435 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#688 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#688 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#688 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#681 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1435 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#677 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#677 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#677 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#256 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#256 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#253 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#253 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#251 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#251 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#251 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1437 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1437 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1437 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#678 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1437 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1436 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1436 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1436 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1436 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#257 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#257 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#257 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#257 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#254 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#252 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#252 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#252 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1438 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1438 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#68 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#68 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#63 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#63 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#63 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1437 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1437 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1439 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1439 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1438 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1438 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1438 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1438 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#690 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1438 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1438 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#690 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#683 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#679 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#684 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#680 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1440 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1440 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1440 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1439 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#692 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#692 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#692 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1439 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#692 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#685 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#681 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1441 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1441 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1441 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#693 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#693 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#693 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#693 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1440 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#686 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#686 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#686 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#682 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1440 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#682 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#682 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#258 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#258 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#255 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#253 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#253 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#253 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1442 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1442 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1442 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1442 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1441 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1441 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1441 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#694 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#694 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#694 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#687 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#687 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#687 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#683 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#683 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#683 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1443 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1443 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1443 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1443 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1442 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1442 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1442 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1444 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1444 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1444 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1443 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1443 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1443 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#695 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#695 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1443 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1443 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1443 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#688 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#688 
&rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#688 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#684 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#684 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#684 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#259 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#256 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#254 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#254 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#254 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#696 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#696 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#696 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#696 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#689 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#69 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#69 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#685 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#64 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#64 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1445 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1445 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1445 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1445 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1444 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1444 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1444 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#260 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#260 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#257 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#697 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#697 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#255 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#255 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#255 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#690 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#690 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#690 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#686 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1446 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1446 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1445 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1445 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1447 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1447 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1446 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1446 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1446 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1446 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#698 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#698 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#698 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#691 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#691 
&rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#687 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#261 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#261 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#258 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#256 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#70 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#65 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#65 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1447 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1447 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1447 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1447 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#699 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#699 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#692 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#688 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#262 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#262 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#259 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#257 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1448 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#700 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#700 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#700 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#700 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#693 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#689 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#263 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#260 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#258 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1450 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1449 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1449 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1449 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1449 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#690 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#690 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#690 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#71 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#71 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#71 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#71 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#66 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#66 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#66 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#66 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#66 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#66 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#264 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#264 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1451 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1451 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1451 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#261 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#261 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#261 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#1451 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#259 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#259 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#259 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1450 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1450 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#702 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#702 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#703 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#703 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#265 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#265 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#265 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#703 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#265 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#696 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#262 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#260 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#260 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#260 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#692 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1452 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1452 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1451 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#72 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#72 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#72 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#72 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#67 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#67 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1452 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1452 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#266 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#266 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#263 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#261 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1454 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1454 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1453 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1453 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#264 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#262 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#704 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#704 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#697 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#697 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#697 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#693 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#693 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#693 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1455 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1455 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1454 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1454 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1454 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1454 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#265 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#705 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#705 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#698 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#263 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#694 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1456 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1455 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1455 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1455 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1455 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#706 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1455 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1455 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#706 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#706 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#706 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#699 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#695 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#269 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#269 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#264 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1457 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1457 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1457 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1456 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1456 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1456 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#696 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#696 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#696 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#270 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#270 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#270 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#267 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#265 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1472 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1472 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#719 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#712 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#708 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1474 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1474 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1474 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1474 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1473 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1473 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#74 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#275 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#74 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#74 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#271 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#269 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#74 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#69 irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&conn->disc_work)->work) 
&hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#6 (work_completion)(&(&conn->disc_work)->work) &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &list->lock#6 irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &list->lock#7 irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_rx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#18 &rq->__lock irq_context: 0 (wq_completion)nfc12_nci_rx_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_tx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_tx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_tx_wq#18 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#18 &rq->__lock irq_context: 0 (wq_completion)nfc12_nci_tx_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#17 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc17_nci_rx_wq#16 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_tx_wq#16 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc17_nci_tx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#14 irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#18 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#17 &rq->__lock irq_context: 0 (wq_completion)nfc17_nci_rx_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#12 stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#12 &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#12 key irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#12 pcpu_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#12 percpu_counters_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#12 pcpu_lock stock_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#12 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc22_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc22_nci_tx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc23_nci_tx_wq#10 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu fill_pool_map-wait-type-override &c->lock irq_context: 0 misc_mtx &dev->mutex rfkill_global_mutex remove_cache_srcu fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_tx_wq#12 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#9 irq_context: 0 misc_mtx hrtimer_bases.lock fill_pool_map-wait-type-override pool_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1475 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1475 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1474 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1474 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc30_nci_tx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc30_nci_tx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#720 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#720 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#720 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#720 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#713 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#713 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#713 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#709 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#709 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#709 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#33 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#33 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#33 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#33 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#33 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#276 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#276 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#272 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1476 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1476 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#270 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1475 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#270 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#270 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1475 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1475 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#75 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#75 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#70 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#70 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#70 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#70 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1475 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1475 &rq->__lock irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#1475 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#26 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#26 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#26 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#19 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#20 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#20 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#20 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#18 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#18 &rq->__lock irq_context: 0 (wq_completion)nfc10_nci_tx_wq#18 irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#22 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#22 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_rx_wq#22 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_tx_wq#22 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc14_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#19 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#18 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc15_nci_rx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_rx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#13 irq_context: 0 
&ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#19 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#19 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#18 &rq->__lock irq_context: 0 (wq_completion)nfc17_nci_tx_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc20_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc22_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc23_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc23_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc23_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc29_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc29_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#13 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_rx_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#11 irq_context: 0 
(wq_completion)nfc32_nci_rx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#721 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#721 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#721 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#721 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#714 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#710 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#277 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#277 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#273 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#273 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#273 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#271 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1477 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1477 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1476 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#71 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#71 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#71 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#71 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#71 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#34 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#34 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#34 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#34 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#34 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_tx_wq#34 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#27 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#27 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#27 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#27 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#26 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#26 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#20 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#21 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#21 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#21 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#21 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#21 &rq->__lock irq_context: 0 (wq_completion)nfc9_nci_tx_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#19 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#19 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#19 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#19 &rq->__lock irq_context: 0 (wq_completion)nfc10_nci_rx_wq#19 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_tx_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#19 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#19 
irq_context: 0 (wq_completion)nfc11_nci_rx_wq#19 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_rx_wq#19 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_tx_wq#19 irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#20 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#16 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc15_nci_tx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#15 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#19 irq_context: 0 (wq_completion)nfc17_nci_tx_wq#19 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#16 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc18_nci_tx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#25 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#25 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#25 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#25 
irq_context: 0 (wq_completion)nfc13_nci_rx_wq#24 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#24 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#24 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#21 irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#21 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#21 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#21 &rq->__lock irq_context: 0 (wq_completion)nfc12_nci_rx_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_tx_wq#21 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#20 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_tx_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#20 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#20 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#20 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#20 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#22 irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#22 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#22 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#22 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#22 irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#21 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#21 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#28 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#28 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#27 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#27 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#35 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#35 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#35 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_rx_wq#35 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#35 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#35 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#77 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#77 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#77 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#77 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#72 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#72 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#72 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1478 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1478 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1477 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1477 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#278 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#278 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#274 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#272 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#722 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#722 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#715 irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#711 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1479 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1478 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1478 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1478 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1478 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#716 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#712 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1479 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1479 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1479 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1481 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1481 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1481 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1481 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1481 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#724 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#724 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#724 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#724 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#717 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1480 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1480 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1480 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#713 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1480 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1482 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1482 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1481 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1481 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1481 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1481 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1481 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1481 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#725 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#725 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#725 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#279 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#279 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#275 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#718 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#714 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#273 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1483 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1483 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1482 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1484 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1484 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1484 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1484 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1483 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1483 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1483 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#1483 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#280 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#280 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#280 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#726 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#726 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#719 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#715 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#280 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1485 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#276 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1485 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1485 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#274 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#274 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#274 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1484 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1484 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1484 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1484 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1484 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1486 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1486 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1486 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1485 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1485 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1485 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1485 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#727 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1485 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1485 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#727 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#727 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#727 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#720 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#720 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#720 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#716 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#716 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#716 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#281 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#281 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#281 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#281 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#277 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#275 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1486 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1486 irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock fs_reclaim 
mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->id_addr_timer)->work) irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &x->wait#3 irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-crypt-wg0#75 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-kex-wg0#149 (work_completion)(&peer->transmit_handshake_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock &c->lock irq_context: 0 
(wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &peer->endpoint_lock rcu_read_lock_bh &obj_hash[i].lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#728 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1488 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1488 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1487 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1487 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock (work_completion)(&(&conn->info_timer)->work) irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->disc_work)->work) irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->auto_accept_work)->work) irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock (work_completion)(&(&conn->idle_work)->work) irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &x->wait#3 irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &rq->__lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1487 irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx &root->kernfs_rwsem irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dev_pm_qos_sysfs_mtx dev_pm_qos_mtx irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem kernfs_idr_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &root->kernfs_rwsem &obj_hash[i].lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock kernfs_idr_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->k_lock 
klist_remove_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock subsys mutex#74 &k->k_lock klist_remove_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock deferred_probe_mutex irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock device_links_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &c->lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &n->list_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock 
&hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci3#5 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1488 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1488 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#729 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#729 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#729 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#721 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#721 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#721 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#717 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1490 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1490 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1490 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#282 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#282 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1490 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1489 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1489 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#278 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#276 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1490 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1490 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1490 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1490 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#730 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#730 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#730 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#730 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1492 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1492 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1492 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#722 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#718 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1492 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1491 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1491 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#283 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#283 irq_context: 0 (wq_completion)events_unbound (work_completion)(&rdev->wiphy_work) &rdev->wiphy.mtx fill_pool_map-wait-type-override &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1493 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1493 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1492 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1492 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1492 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1492 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#731 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#731 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#731 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#731 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc4_nci_cmd_wq#284 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#284 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#284 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#284 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#723 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#280 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#719 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#719 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#719 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#278 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1494 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1494 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1494 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1493 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1493 irq_context: 0 &sb->s_type->i_mutex_key#9 key irq_context: 0 &sb->s_type->i_mutex_key#9 pcpu_lock irq_context: 0 &sb->s_type->i_mutex_key#9 percpu_counters_lock irq_context: 0 &sb->s_type->i_mutex_key#9 pcpu_lock stock_lock irq_context: 0 &sb->s_type->i_mutex_key#9 quarantine_lock irq_context: 0 (wq_completion)wg-kex-wg1#130 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &n->list_lock &c->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1495 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1495 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1494 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#732 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#732 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#732 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1496 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1496 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1496 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#732 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1496 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#724 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#724 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#724 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1495 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1495 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1495 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#281 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#281 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#281 &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#281 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1495 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#279 irq_context: 0 sb_writers#3 sb_internal &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1497 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1497 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1497 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#1496 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1496 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#733 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#733 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#725 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#721 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1498 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1498 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1497 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1497 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1498 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1498 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1498 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1498 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1498 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1498 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1500 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1500 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1499 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1499 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1499 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#734 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#734 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#726 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#722 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1500 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1500 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1500 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1502 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1501 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1501 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#735 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#735 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#735 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#735 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#727 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#727 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#723 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#723 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#723 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1503 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1503 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1502 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1502 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#286 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#286 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#286 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#286 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#282 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#282 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#282 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#736 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#724 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#724 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#724 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#280 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#280 &rq->__lock irq_context: 0 
(wq_completion)nfc4_nci_tx_wq#280 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#280 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#280 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1504 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1503 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1503 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#737 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#737 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#737 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#737 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#729 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#729 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#729 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#725 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#725 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#725 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1505 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1505 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1505 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1505 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1504 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1504 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1504 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1504 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#283 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#283 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1506 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1506 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1505 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1505 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1505 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1505 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#281 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#281 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#281 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#281 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#281 &obj_hash[i].lock irq_context: 0 &dev->mutex (work_completion)(&rfkill->uevent_work) &cfs_rq->removed.lock irq_context: 0 &dev->mutex (work_completion)(&rfkill->uevent_work) &obj_hash[i].lock irq_context: 0 &dev->mutex (work_completion)(&rfkill->uevent_work) pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1506 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1506 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1506 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1506 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#738 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#738 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#738 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#726 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#739 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#739 &rq->__lock irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#739 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1508 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#731 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#731 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1507 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1507 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#727 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#727 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1509 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1509 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1509 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1508 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1508 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#740 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#740 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#732 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#288 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#728 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#288 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#284 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#284 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#284 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#282 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1509 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1509 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#733 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#729 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#729 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#729 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#742 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#742 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1511 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1511 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#742 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#734 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#734 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#734 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#730 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#730 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#730 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1511 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1510 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1510 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1510 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1510 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1510 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1510 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#289 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#289 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1512 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1512 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1512 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1512 
irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1511 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1511 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1511 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1511 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#743 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#743 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#735 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#731 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#731 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#731 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#290 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#286 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#284 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1513 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1513 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1512 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1512 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#744 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#736 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#732 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1514 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1514 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1513 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1513 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1513 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#745 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#745 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#737 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#733 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1513 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1513 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#291 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#291 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#291 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#291 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#287 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#287 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#285 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#285 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1515 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1515 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1515 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1515 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1514 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1514 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1514 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#746 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#746 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#746 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1515 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#746 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#746 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#746 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#292 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc4_nci_cmd_wq#292 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#292 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#738 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#292 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#734 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#288 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#286 irq_context: 0 sb_writers#5 &type->i_mutex_dir_key#5/1 tomoyo_ss rcu_read_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1517 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#747 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#747 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#747 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1516 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1516 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#747 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#739 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#735 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#735 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#735 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1518 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#748 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#748 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1517 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1517 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#740 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#736 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#736 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#736 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#78 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#78 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#78 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#73 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#293 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#73 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#293 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#73 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#73 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#73 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#73 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#293 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#289 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#289 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#289 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#287 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#287 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#287 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1519 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1519 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1519 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1519 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1518 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1518 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#749 
irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#749 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#749 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#749 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#741 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#741 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#741 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1520 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1520 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1519 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1519 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1519 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1519 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#294 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#294 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#290 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#288 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#288 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#288 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#288 stock_lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#288 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#288 key irq_context: 0 (wq_completion)nfc4_nci_tx_wq#288 pcpu_lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#288 percpu_counters_lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#288 pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#79 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#74 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#74 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#74 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#74 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1521 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1521 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#750 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#750 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#750 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#750 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#742 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#742 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#738 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#738 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#738 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1522 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1522 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1521 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#751 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#743 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#739 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#739 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#739 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1521 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#295 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#295 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#291 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#291 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#291 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#289 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc3_nci_cmd_wq#752 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#752 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#752 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#744 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#740 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#740 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#740 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1523 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1523 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1523 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1523 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1522 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1522 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1522 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#296 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#296 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#296 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#296 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#292 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1522 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#292 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#292 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#290 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#80 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#80 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#80 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1524 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1524 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#80 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#75 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#75 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#75 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#75 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1523 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1523 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1523 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#741 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#754 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#754 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#754 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#754 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#746 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#746 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#742 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#36 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#36 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#36 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#36 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#36 irq_context: 0 misc_mtx nfc_devlist_mutex &meta->lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#37 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#37 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#37 
irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#38 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#38 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#38 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#38 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#38 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#38 irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#21 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc10_nci_cmd_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#21 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#21 &rq->__lock irq_context: 0 (wq_completion)nfc10_nci_rx_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc10_nci_tx_wq#21 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc14_nci_tx_wq#17 irq_context: 0 misc_mtx &rq->__lock &base->lock &obj_hash[i].lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#17 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#17 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#15 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc32_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_tx_wq#12 irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#13 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#13 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc25_nci_rx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc25_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc25_nci_tx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc25_nci_tx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#5 &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc39_nci_cmd_wq#5 irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#9 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc36_nci_cmd_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc39_nci_rx_wq#5 irq_context: 0 (wq_completion)nfc39_nci_tx_wq#5 irq_context: 0 (wq_completion)nfc36_nci_cmd_wq#9 irq_context: 0 (wq_completion)nfc36_nci_rx_wq#9 irq_context: 0 (wq_completion)nfc36_nci_tx_wq#9 irq_context: 0 (wq_completion)nfc36_nci_tx_wq#9 &rq->__lock irq_context: 0 (wq_completion)nfc36_nci_tx_wq#9 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq#3 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc42_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc42_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc42_nci_rx_wq#3 &rq->__lock irq_context: 0 (wq_completion)nfc42_nci_rx_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc42_nci_tx_wq#3 &rq->__lock irq_context: 0 (wq_completion)nfc42_nci_tx_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc49_nci_cmd_wq#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1525 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1525 irq_context: 0 (wq_completion)nfc49_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc49_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1524 irq_context: 0 (wq_completion)nfc49_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#297 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#297 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#293 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#293 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#293 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#291 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#81 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#81 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#81 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#81 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#76 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#76 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#755 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#755 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#755 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#747 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#743 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#29 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#29 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#28 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#28 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#28 &rq->__lock irq_context: 0 (wq_completion)nfc7_nci_tx_wq#28 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc8_nci_tx_wq#22 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#22 &rq->__lock irq_context: 0 
(wq_completion)nfc8_nci_tx_wq#22 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#757 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#757 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#757 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#757 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#748 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#748 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#748 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#744 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#39 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#744 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#744 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#39 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#39 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#39 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#39 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#39 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#39 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_tx_wq#39 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#39 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_tx_wq#39 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc11_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc11_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#21 irq_context: 0 (wq_completion)nfc11_nci_rx_wq#21 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_rx_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc11_nci_tx_wq#21 irq_context: 0 (wq_completion)nfc11_nci_tx_wq#21 &rq->__lock irq_context: 0 (wq_completion)nfc11_nci_tx_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#22 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc12_nci_cmd_wq#22 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc12_nci_cmd_wq#22 irq_context: 0 (wq_completion)nfc12_nci_rx_wq#22 irq_context: 0 (wq_completion)nfc12_nci_tx_wq#22 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#17 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc6_nci_tx_wq#40 irq_context: 0 &ndev->req_lock (wq_completion)nfc15_nci_cmd_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc6_nci_rx_wq#40 irq_context: 0 misc_mtx (wq_completion)nfc6_nci_rx_wq#40 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc6_nci_rx_wq#40 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx (wq_completion)nfc6_nci_cmd_wq#40 irq_context: 0 (wq_completion)nfc15_nci_cmd_wq#17 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#17 irq_context: 0 (wq_completion)nfc15_nci_rx_wq#17 &rq->__lock irq_context: 0 (wq_completion)nfc15_nci_rx_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc15_nci_tx_wq#17 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1526 irq_context: 0 (wq_completion)nfc15_nci_tx_wq#17 &rq->__lock irq_context: 0 (wq_completion)nfc15_nci_tx_wq#17 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#1526 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1526 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1526 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1525 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1525 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1525 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc9_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc9_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc9_nci_rx_wq#23 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#23 irq_context: 0 (wq_completion)nfc9_nci_tx_wq#23 &rq->__lock irq_context: 0 (wq_completion)nfc9_nci_tx_wq#23 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc14_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc14_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc10_nci_rx_wq#22 irq_context: 0 (wq_completion)nfc10_nci_tx_wq#22 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#41 irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#41 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc14_nci_rx_wq#18 &rq->__lock irq_context: 0 (wq_completion)nfc14_nci_rx_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc14_nci_tx_wq#18 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#41 irq_context: 0 (wq_completion)nfc6_nci_tx_wq#41 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc20_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc20_nci_cmd_wq#11 irq_context: 0 misc_mtx (wq_completion)nfc2_nci_cmd_wq#1527 irq_context: 0 misc_mtx (wq_completion)nfc2_nci_cmd_wq#1527 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc2_nci_cmd_wq#1527 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc20_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc20_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc52_nci_cmd_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc52_nci_cmd_wq &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc52_nci_cmd_wq &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc52_nci_cmd_wq irq_context: 0 (wq_completion)nfc52_nci_rx_wq irq_context: 0 (wq_completion)nfc52_nci_rx_wq &rq->__lock irq_context: 0 (wq_completion)nfc52_nci_rx_wq &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc52_nci_tx_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc52_nci_tx_wq &rq->__lock irq_context: 0 (wq_completion)nfc52_nci_tx_wq &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#13 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc21_nci_cmd_wq#13 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc21_nci_cmd_wq#13 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc21_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc21_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc21_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc21_nci_tx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc21_nci_tx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#82 
irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#82 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc19_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#77 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#77 irq_context: 0 (wq_completion)nfc19_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc19_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#16 irq_context: 0 (wq_completion)nfc19_nci_tx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc19_nci_tx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#42 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#42 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#42 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#42 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#42 irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#42 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#42 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc23_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc51_nci_cmd_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc51_nci_cmd_wq &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc51_nci_cmd_wq &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc23_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc23_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc23_nci_tx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc23_nci_tx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc24_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc8_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc8_nci_cmd_wq#23 irq_context: 0 (wq_completion)nfc8_nci_rx_wq#23 irq_context: 0 (wq_completion)nfc8_nci_tx_wq#23 irq_context: 0 (wq_completion)nfc51_nci_cmd_wq irq_context: 0 (wq_completion)nfc51_nci_rx_wq irq_context: 0 (wq_completion)nfc51_nci_tx_wq irq_context: 0 &ndev->req_lock (wq_completion)nfc48_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc48_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc24_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc24_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#11 irq_context: 0 (wq_completion)nfc24_nci_tx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc24_nci_tx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc48_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc48_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc47_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc47_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc47_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc47_nci_rx_wq#2 &rq->__lock irq_context: 0 (wq_completion)nfc47_nci_rx_wq#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#16 irq_context: 0 
&ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc28_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc28_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc47_nci_tx_wq#2 irq_context: 0 (wq_completion)nfc47_nci_tx_wq#2 &rq->__lock irq_context: 0 (wq_completion)nfc47_nci_tx_wq#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc28_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc28_nci_rx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc45_nci_cmd_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc45_nci_cmd_wq#2 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc45_nci_cmd_wq#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc45_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc28_nci_rx_wq#16 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc28_nci_rx_wq#16 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc45_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc28_nci_tx_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc43_nci_cmd_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc43_nci_cmd_wq#2 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc43_nci_cmd_wq#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#11 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc29_nci_cmd_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc29_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc43_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc43_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc29_nci_rx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc29_nci_rx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc43_nci_tx_wq#2 irq_context: 0 (wq_completion)nfc43_nci_tx_wq#2 &rq->__lock irq_context: 0 (wq_completion)nfc43_nci_tx_wq#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc29_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc30_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc30_nci_cmd_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc39_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc39_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc39_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc30_nci_rx_wq#11 &rq->__lock irq_context: 0 (wq_completion)nfc30_nci_rx_wq#11 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc39_nci_tx_wq#6 irq_context: 0 (wq_completion)nfc39_nci_tx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc39_nci_tx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc30_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#10 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc34_nci_cmd_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc34_nci_cmd_wq#10 irq_context: 0 
(wq_completion)nfc34_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc34_nci_tx_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc37_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc37_nci_cmd_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#12 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc25_nci_cmd_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc25_nci_cmd_wq#12 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#12 irq_context: 0 (wq_completion)nfc25_nci_rx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc25_nci_rx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc37_nci_rx_wq#8 irq_context: 0 (wq_completion)nfc37_nci_rx_wq#8 &rq->__lock irq_context: 0 (wq_completion)nfc25_nci_tx_wq#12 irq_context: 0 (wq_completion)nfc37_nci_rx_wq#8 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc25_nci_tx_wq#12 &rq->__lock irq_context: 0 (wq_completion)nfc25_nci_tx_wq#12 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc37_nci_tx_wq#8 irq_context: 0 &ndev->req_lock (wq_completion)nfc40_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc40_nci_cmd_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc38_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc40_nci_rx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc38_nci_cmd_wq#6 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc38_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc40_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#7 irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#7 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc35_nci_cmd_wq#7 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc35_nci_cmd_wq#7 irq_context: 0 (wq_completion)nfc38_nci_cmd_wq#6 irq_context: 0 (wq_completion)nfc35_nci_rx_wq#7 irq_context: 0 (wq_completion)nfc38_nci_cmd_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc38_nci_cmd_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc35_nci_tx_wq#7 irq_context: 0 (wq_completion)nfc38_nci_rx_wq#6 irq_context: 0 (wq_completion)nfc38_nci_rx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc38_nci_rx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc32_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc38_nci_tx_wq#6 &rq->__lock irq_context: 0 (wq_completion)nfc32_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc38_nci_tx_wq#6 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc32_nci_rx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc32_nci_rx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc32_nci_rx_wq#14 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc32_nci_rx_wq#14 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc32_nci_tx_wq#14 irq_context: 0 (wq_completion)nfc32_nci_tx_wq#14 &rq->__lock irq_context: 0 (wq_completion)nfc32_nci_tx_wq#14 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc33_nci_cmd_wq#10 irq_context: 0 (wq_completion)nfc33_nci_cmd_wq#10 irq_context: 0 &ndev->req_lock (wq_completion)nfc31_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc31_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc31_nci_rx_wq#11 
irq_context: 0 (wq_completion)nfc31_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#15 irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#15 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc26_nci_cmd_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc26_nci_cmd_wq#15 irq_context: 0 (wq_completion)nfc26_nci_rx_wq#15 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#15 irq_context: 0 (wq_completion)nfc26_nci_tx_wq#15 &rq->__lock irq_context: 0 (wq_completion)nfc26_nci_tx_wq#15 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc27_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc33_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc27_nci_cmd_wq#11 irq_context: 0 (wq_completion)nfc27_nci_rx_wq#11 irq_context: 0 (wq_completion)nfc27_nci_tx_wq#11 irq_context: 0 &ndev->req_lock (wq_completion)nfc22_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc22_nci_cmd_wq#14 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc33_nci_tx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc33_nci_tx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc22_nci_rx_wq#14 irq_context: 0 (wq_completion)nfc22_nci_tx_wq#14 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#18 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc18_nci_cmd_wq#18 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc18_nci_cmd_wq#18 irq_context: 0 (wq_completion)nfc18_nci_rx_wq#18 irq_context: 0 (wq_completion)nfc18_nci_tx_wq#18 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#21 irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#21 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc17_nci_cmd_wq#21 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_cmd_wq#21 irq_context: 0 (wq_completion)nfc36_nci_rx_wq#10 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#20 irq_context: 0 (wq_completion)nfc36_nci_tx_wq#10 irq_context: 0 (wq_completion)nfc17_nci_rx_wq#20 &rq->__lock irq_context: 0 (wq_completion)nfc36_nci_tx_wq#10 &rq->__lock irq_context: 0 (wq_completion)nfc17_nci_rx_wq#20 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc36_nci_tx_wq#10 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc17_nci_tx_wq#20 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#16 irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#16 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc16_nci_cmd_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_cmd_wq#16 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#16 irq_context: 0 (wq_completion)nfc16_nci_rx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_rx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc16_nci_rx_wq#16 &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc16_nci_rx_wq#16 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#16 irq_context: 0 (wq_completion)nfc16_nci_tx_wq#16 &rq->__lock irq_context: 0 (wq_completion)nfc16_nci_tx_wq#16 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc41_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc41_nci_cmd_wq#3 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc13_nci_cmd_wq#26 irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#26 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc13_nci_cmd_wq#26 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc13_nci_cmd_wq#26 irq_context: 0 (wq_completion)nfc13_nci_rx_wq#25 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#25 irq_context: 0 (wq_completion)nfc13_nci_tx_wq#25 &rq->__lock irq_context: 0 (wq_completion)nfc13_nci_tx_wq#25 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc41_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc41_nci_tx_wq#3 irq_context: 0 (wq_completion)nfc41_nci_tx_wq#3 &rq->__lock irq_context: 0 (wq_completion)nfc41_nci_tx_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc44_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc44_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc44_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc44_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc42_nci_cmd_wq#4 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc42_nci_cmd_wq#4 irq_context: 0 (wq_completion)nfc42_nci_rx_wq#4 irq_context: 0 (wq_completion)nfc42_nci_tx_wq#4 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#298 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#298 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#294 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#292 irq_context: 0 &ndev->req_lock (wq_completion)nfc7_nci_cmd_wq#30 irq_context: 0 (wq_completion)nfc7_nci_cmd_wq#30 irq_context: 0 (wq_completion)nfc7_nci_rx_wq#29 irq_context: 0 (wq_completion)nfc7_nci_tx_wq#29 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#758 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#758 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#749 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#745 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1528 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1528 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1526 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1526 irq_context: 0 &ndev->req_lock (wq_completion)nfc49_nci_cmd_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc49_nci_cmd_wq#3 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc49_nci_cmd_wq#3 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock &base->lock irq_context: 0 (wq_completion)events (work_completion)(&(&nsim_dev->trap_data->trap_report_dw)->work) &devlink->lock_key#82 &nsim_trap_data->trap_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)nfc49_nci_cmd_wq#3 irq_context: 0 (wq_completion)nfc49_nci_rx_wq#3 irq_context: 0 (wq_completion)nfc49_nci_tx_wq#3 irq_context: 0 &ndev->req_lock (wq_completion)nfc50_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc50_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc50_nci_rx_wq#2 irq_context: 0 (wq_completion)nfc50_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc46_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc46_nci_cmd_wq#2 irq_context: 0 (wq_completion)nfc46_nci_rx_wq#2 &rq->__lock irq_context: 0 (wq_completion)nfc46_nci_rx_wq#2 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc46_nci_tx_wq#2 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1529 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1529 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1527 
irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1527 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#759 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#759 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#759 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#750 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#750 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#750 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1530 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1530 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1530 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#746 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1530 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1528 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1528 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1528 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1528 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1528 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1528 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#760 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#760 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#760 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#299 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#299 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#295 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#760 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#751 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#293 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#747 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#747 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#747 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1531 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1531 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1531 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1531 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1529 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1529 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1529 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1529 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1529 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#761 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#761 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#761 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#761 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#752 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#752 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#748 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1530 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1530 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1530 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1530 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#762 irq_context: 0 
&ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#762 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#762 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#762 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#300 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#300 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#753 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#300 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#300 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#753 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#753 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#749 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1533 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#749 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1533 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#749 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_rx_wq#296 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#296 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#296 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1531 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1531 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1531 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#294 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1531 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1531 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1531 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#83 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#78 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#78 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1534 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1534 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#763 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#763 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#763 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1534 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#763 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#754 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1532 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#750 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#301 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#301 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#297 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#295 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1535 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1535 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1535 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1535 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1535 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1535 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1533 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1533 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1533 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1533 irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#1533 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1533 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#764 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#764 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#764 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#764 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#755 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#755 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#751 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1534 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1534 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1534 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1534 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#765 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#756 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#756 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#752 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1537 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1537 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1535 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1535 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#302 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#302 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#298 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#298 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#298 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#296 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#296 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#296 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1538 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1538 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1538 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1536 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1536 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1536 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1536 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1539 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1539 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1537 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1537 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1537 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1537 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1537 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1537 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#766 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#766 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#766 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#766 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#757 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1540 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1540 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1538 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1538 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1538 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#767 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1538 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#758 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#758 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#758 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#754 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#754 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#754 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#303 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#303 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#84 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#299 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#84 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#84 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#84 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#297 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#297 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#297 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_rx_wq#79 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#79 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#768 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#768 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#768 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#768 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#759 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1541 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1541 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1541 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#755 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1539 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1539 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1542 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1540 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1540 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1540 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1540 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1543 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1543 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1543 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1541 irq_context: 0 misc_mtx (wq_completion)nfc5_nci_cmd_wq#85 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1541 irq_context: 0 misc_mtx (wq_completion)nfc6_nci_cmd_wq#43 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#80 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#80 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#80 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#44 irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#44 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc6_nci_cmd_wq#44 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_cmd_wq#44 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#43 irq_context: 0 (wq_completion)nfc6_nci_rx_wq#43 &rq->__lock irq_context: 0 (wq_completion)nfc6_nci_rx_wq#43 
&rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc6_nci_tx_wq#43 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1544 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1544 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#300 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1544 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1542 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#298 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1542 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#769 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#769 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#87 irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#87 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#81 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#760 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#756 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#81 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1543 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1543 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1543 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#770 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#770 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#770 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#761 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#757 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#757 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#757 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#305 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#305 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#305 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#305 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#301 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#299 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#88 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#88 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#88 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#88 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#88 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#88 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#82 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#82 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#82 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#82 irq_context: 0 (wq_completion)nfc5_nci_tx_wq#82 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_tx_wq#82 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1546 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1546 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1546 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1544 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1544 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1544 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1544 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#771 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#771 &rq->__lock &per_cpu_ptr(group->pcpu, 
cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#771 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#762 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#758 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#89 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#89 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#89 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_cmd_wq#89 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#83 irq_context: 0 (wq_completion)nfc5_nci_rx_wq#83 &rq->__lock irq_context: 0 (wq_completion)nfc5_nci_rx_wq#83 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#83 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#306 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#306 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#302 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#302 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#302 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#300 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#300 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#300 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1545 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1545 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1545 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#772 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#772 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#772 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#772 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#763 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#763 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#763 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#759 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#759 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#759 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1548 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1548 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1548 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1548 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1546 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1546 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1546 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1546 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#307 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#303 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#301 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#773 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#773 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#773 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#773 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#773 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#773 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#764 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#764 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#764 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#760 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#760 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#760 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#774 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#774 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#774 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#765 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#761 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#761 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#761 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#308 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#308 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#308 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#308 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#304 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#304 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#304 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1549 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#302 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1549 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#302 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#302 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1547 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#90 irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#90 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc5_nci_cmd_wq#90 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc5_nci_tx_wq#84 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1547 irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &dir->lock#2 irq_context: 0 (wq_completion)wg-crypt-wg1#80 (work_completion)(&peer->transmit_packet_work) &peer->endpoint_lock rcu_read_lock_bh rcu_read_lock &ul->lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#775 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#775 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#766 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#766 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#762 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1548 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1548 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#309 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#309 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#309 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#776 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#776 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#776 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#309 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#305 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#305 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#305 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 
0 (wq_completion)nfc3_nci_cmd_wq#776 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#767 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#303 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#763 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#763 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#763 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#303 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#303 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1551 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1551 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1549 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1549 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1549 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1549 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1552 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1552 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1550 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1550 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#777 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1553 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1553 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1553 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem key irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem pcpu_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem pool_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem percpu_counters_lock irq_context: 0 &type->i_mutex_dir_key#4 &root->kernfs_rwsem pcpu_lock stock_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1553 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1551 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1551 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1551 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#777 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#777 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1551 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#777 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#768 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#764 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#764 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1554 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1554 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1554 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1554 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1552 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1552 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1553 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1553 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#778 irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#778 &rq->__lock irq_context: 0 misc_mtx (wq_completion)nfc3_nci_cmd_wq#778 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1556 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1556 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1556 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#779 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#769 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#769 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1554 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1554 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#765 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1554 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#310 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#306 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#304 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1557 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1557 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1557 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1557 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1555 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1555 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1555 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1555 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1555 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1558 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1558 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1558 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1556 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1556 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1559 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1559 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1557 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1557 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1557 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1557 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#770 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#766 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1560 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1560 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1560 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1558 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1558 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1561 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1561 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1562 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1562 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1560 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1560 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1563 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1563 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1563 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1561 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1561 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1561 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1561 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1561 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1561 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 
(wq_completion)nfc3_nci_tx_wq#767 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#767 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#767 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1564 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1564 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1564 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1564 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1562 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1562 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1562 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#311 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#311 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#311 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#311 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#307 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#307 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#305 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#305 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1565 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1565 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1565 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1563 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1563 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1563 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1563 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1563 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1563 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#772 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#768 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1566 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1566 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1566 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1566 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1564 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1564 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1564 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#783 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#783 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#783 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1564 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#783 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#773 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1567 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1567 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1567 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1565 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1565 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1565 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1565 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#784 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#784 &rq->__lock 
&per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#784 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#774 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#774 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#774 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#770 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1568 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1568 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1568 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1566 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1566 irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#312 irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#312 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#308 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#308 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_tx_wq#306 irq_context: 0 (wq_completion)nfc4_nci_tx_wq#306 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#785 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#785 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#785 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_rx_wq#775 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#775 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#771 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1569 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1569 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1567 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1567 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1568 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1569 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1569 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1570 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1571 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1571 irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-crypt-wg2#74 (work_completion)(&peer->transmit_packet_work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)wg-kex-wg1#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + 
(((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &handshake->lock &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) &c->lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)wg-kex-wg2#147 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock_bh rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1575 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1573 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1573 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1574 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1574 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1574 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1574 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1574 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1578 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1578 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1576 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1576 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1576 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1576 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1577 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1577 irq_context: 0 &ndev->req_lock 
(wq_completion)nfc2_nci_cmd_wq#1580 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1578 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1578 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1578 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1578 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1583 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1583 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1581 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1581 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1581 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1581 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1581 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1582 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1582 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1583 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1583 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1583 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1586 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1584 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1584 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1584 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1584 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1585 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1585 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1585 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1585 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1585 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1585 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &hdev->req_lock (wq_completion)hci1#2 irq_context: 0 sb_writers#3 &type->i_mutex_dir_key#3/1 &type->i_mutex_dir_key#3 &obj_hash[i].lock pool_lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1586 irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &cfs_rq->removed.lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override &obj_hash[i].lock irq_context: 0 (wq_completion)rcu_gp (work_completion)(&rew->rew_work) sched_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1587 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1587 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1590 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1588 irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock &hdev->req_wait_q irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 irq_context: 0 
(wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) &list->lock#7 irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) fs_reclaim irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) &list->lock#5 irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &list->lock#7 irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) lock#6 irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) lock#6 kcov_remote_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) fs_reclaim irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &c->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) (console_sem).lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner &port_lock_key irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) console_lock console_srcu console_owner console_owner_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &rq->__lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) rcu_read_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->req_wait_q &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) lock#6 &kcov->lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock (&timer.timer) irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) &data->read_wait irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) rcu_read_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &base->lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) &c->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) &data->read_wait &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock &c->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) &n->list_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->cmd_work) &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1616 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1616 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1616 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1616 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1616 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1616 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 misc_mtx nfc_devlist_mutex &root->kernfs_rwsem &rq->__lock cpu_asid_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#791 irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#791 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#781 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#781 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#781 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#777 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_tx_wq#777 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1619 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1619 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1619 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1619 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1617 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1617 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#792 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#792 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#792 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#782 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#782 &rq->__lock irq_context: 0 (wq_completion)nfc3_nci_rx_wq#782 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_tx_wq#778 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1620 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1620 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1618 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1618 irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#793 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc3_nci_cmd_wq#793 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1621 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1621 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1621 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc3_nci_cmd_wq#793 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1621 irq_context: 0 (wq_completion)nfc3_nci_rx_wq#783 irq_context: 0 (wq_completion)nfc3_nci_tx_wq#779 irq_context: 0 
(wq_completion)nfc2_nci_rx_wq#1619 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1619 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1619 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1619 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1619 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1619 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#314 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#314 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#314 &cfs_rq->removed.lock irq_context: 0 &ndev->req_lock (wq_completion)nfc4_nci_cmd_wq#314 &obj_hash[i].lock irq_context: 0 (wq_completion)nfc4_nci_cmd_wq#314 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#310 irq_context: 0 (wq_completion)nfc4_nci_rx_wq#310 &rq->__lock irq_context: 0 (wq_completion)nfc4_nci_rx_wq#310 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc4_nci_tx_wq#308 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1622 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1622 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1622 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1620 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1620 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1620 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1623 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1623 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1591 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1591 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1591 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1591 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1589 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1589 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1589 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1589 irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &hdev->req_lock hci_sk_list.lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) fs_reclaim irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &c->lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) hci_sk_list.lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->power_on) &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) chan_list_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &c->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &obj_hash[i].lock 
irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &x->wait#9 irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &xa->xa_lock#17 &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &k->list_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock lock kernfs_idr_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock bus_type_sem irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock sysfs_symlink_target_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &root->kernfs_rwsem irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &dev->power.lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock dpm_list_mtx irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &c->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &n->list_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rlock-AF_NETLINK irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex rcu_read_lock &ei->socket.wq.wait &ep->lock &ep->wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex nl_table_wait.lock irq_context: 0 (wq_completion)hci1#4 
(work_completion)(&hdev->rx_work) &hdev->lock uevent_sock_mutex &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &k->k_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock subsys mutex#74 &k->k_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &list->lock#7 irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &c->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &hdev->unregister_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->cmd_sync_work) irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->cmd_sync_work) &hdev->cmd_sync_work_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->cmd_sync_work) &hdev->req_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock chan_list_lock irq_context: 0 (wq_completion)hci1#3 (work_completion)(&hdev->cmd_sync_work) &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->ident_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &list->lock#8 irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock &pool->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock rcu_read_lock 
&pool->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_cb_list_lock &conn->chan_lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock &base->lock &obj_hash[i].lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->rx_work) &hdev->lock hci_sk_list.lock irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->tx_work) irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->tx_work) &list->lock#8 irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->tx_work) tk_core.seq.seqcount irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->tx_work) &list->lock#5 irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->tx_work) &data->read_wait irq_context: 0 (wq_completion)hci1#4 (work_completion)(&hdev->tx_work) &list->lock#7 irq_context: 0 (wq_completion)hci1#4 (work_completion)(&conn->pending_rx_work) irq_context: 0 (wq_completion)hci1#4 (work_completion)(&conn->pending_rx_work) &list->lock#9 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1592 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1592 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1590 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1590 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1590 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1593 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1593 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1593 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1593 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1591 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1591 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1591 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1591 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1591 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1591 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1592 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1592 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1592 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1592 &rq->__lock irq_context: 0 (wq_completion)bond0#84 irq_context: 0 (wq_completion)bond0#84 &rq->__lock irq_context: 0 (wq_completion)bond0#84 (work_completion)(&(&slave->notify_work)->work) irq_context: 0 (wq_completion)bond0#84 (work_completion)(&(&slave->notify_work)->work) &obj_hash[i].lock irq_context: 0 (wq_completion)bond0#84 (work_completion)(&(&slave->notify_work)->work) &base->lock irq_context: 0 (wq_completion)bond0#84 (work_completion)(&(&slave->notify_work)->work) &base->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#84 irq_context: 0 rtnl_mutex team->team_lock_key#84 fs_reclaim irq_context: 0 rtnl_mutex team->team_lock_key#84 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 rtnl_mutex team->team_lock_key#84 netpoll_srcu irq_context: 0 rtnl_mutex team->team_lock_key#84 net_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#84 net_rwsem &list->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#84 &tn->lock irq_context: 0 rtnl_mutex team->team_lock_key#84 _xmit_ETHER irq_context: 0 
rtnl_mutex team->team_lock_key#84 &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#84 input_pool.lock irq_context: 0 rtnl_mutex team->team_lock_key#84 &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#84 &n->list_lock irq_context: 0 rtnl_mutex team->team_lock_key#84 &n->list_lock &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#84 rcu_read_lock &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#84 &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#84 nl_table_lock irq_context: 0 rtnl_mutex team->team_lock_key#84 nl_table_wait.lock irq_context: 0 rtnl_mutex team->team_lock_key#84 rcu_read_lock &pool->lock irq_context: 0 rtnl_mutex team->team_lock_key#84 rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 rtnl_mutex team->team_lock_key#84 rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 rtnl_mutex team->team_lock_key#84 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#84 rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 rtnl_mutex team->team_lock_key#84 &rq->__lock irq_context: 0 rtnl_mutex team->team_lock_key#84 &in_dev->mc_tomb_lock irq_context: 0 rtnl_mutex team->team_lock_key#84 &im->lock irq_context: 0 rtnl_mutex team->team_lock_key#84 _xmit_ETHER &c->lock irq_context: 0 rtnl_mutex team->team_lock_key#84 cbs_list_lock irq_context: 0 rtnl_mutex team->team_lock_key#84 &ndev->lock irq_context: 0 rtnl_mutex team->team_lock_key#84 sysfs_symlink_target_lock irq_context: 0 rtnl_mutex team->team_lock_key#84 lock irq_context: 0 rtnl_mutex team->team_lock_key#84 lock kernfs_idr_lock irq_context: 0 rtnl_mutex team->team_lock_key#84 &root->kernfs_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#84 &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 rtnl_mutex team->team_lock_key#84 lweventlist_lock irq_context: 0 rtnl_mutex team->team_lock_key#84 lweventlist_lock &dir->lock#2 irq_context: 0 rtnl_mutex team->team_lock_key#84 (console_sem).lock irq_context: 0 rtnl_mutex team->team_lock_key#84 console_lock console_srcu console_owner_lock irq_context: 0 rtnl_mutex team->team_lock_key#84 console_lock console_srcu console_owner irq_context: 0 rtnl_mutex team->team_lock_key#84 console_lock console_srcu console_owner &port_lock_key irq_context: 0 rtnl_mutex team->team_lock_key#84 console_lock console_srcu console_owner console_owner_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1595 irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1595 &rq->__lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1595 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1595 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1593 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1593 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_send#10 irq_context: 0 (wq_completion)netns net_cleanup_work pernet_ops_rwsem (wq_completion)tipc_crypto#10 irq_context: 0 (wq_completion)bond0#84 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 (wq_completion)bond0#84 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex &tn->lock irq_context: 0 (wq_completion)bond0#84 (work_completion)(&(&slave->notify_work)->work) rtnl_mutex.wait_lock irq_context: 0 (wq_completion)bond0#84 (work_completion)(&(&slave->notify_work)->work) &p->pi_lock irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1596 irq_context: 0 
(wq_completion)nfc2_nci_cmd_wq#1596 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1594 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1594 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1595 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1595 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1596 &rq->__lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1596 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override &pcp->lock &zone->lock irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override &____s->seqcount irq_context: 0 (wq_completion)wg-crypt-wg0#66 (work_completion)(&({ do { const void *__vpp_verify = (typeof((worker) + 0))((void *)0); (void)__vpp_verify; } while (0); ({ unsigned long __ptr; __ptr = (unsigned long) ((typeof(*((worker))) *)((worker))); (typeof((typeof(*((worker))) *)((worker)))) (__ptr + (((__per_cpu_offset[(cpu)])))); }); })->work) rcu_read_lock &pool->lock fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1599 irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1597 irq_context: 0 
(wq_completion)nfc2_nci_tx_wq#1597 irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex fill_pool_map-wait-type-override &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex fill_pool_map-wait-type-override pool_lock#2 irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex fill_pool_map-wait-type-override &n->list_lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex fill_pool_map-wait-type-override &n->list_lock &c->lock irq_context: 0 (wq_completion)events (work_completion)(&rfkill_global_led_trigger_work) rfkill_global_mutex fill_pool_map-wait-type-override pool_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 fs_reclaim irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 rcu_read_lock &ndev->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 &obj_hash[i].lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 nl_table_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 nl_table_wait.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 net_rwsem irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 net_rwsem &list->lock#2 irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 &tn->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 &n->list_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 &n->list_lock &c->lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 batched_entropy_u8.lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 kfence_freelist_lock irq_context: 0 (wq_completion)events (linkwatch_work).work rtnl_mutex team->team_lock_key#84 &meta->lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1598 irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1598 irq_context: 0 &mm->mmap_lock &folio_wait_table[i] &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 crngs.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 devlinks.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &xa->xa_lock#19 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 
&c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 
&n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &obj_hash[i].lock pool_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &(&net->nexthop.notifier_chain)->rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock rcu_read_lock &pool->lock &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 &ndev->req_lock (wq_completion)nfc2_nci_cmd_wq#1601 irq_context: 0 (wq_completion)nfc2_nci_cmd_wq#1601 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &cfs_rq->removed.lock irq_context: 0 (wq_completion)nfc2_nci_rx_wq#1599 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &tb->tb6_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &tb->tb6_lock &net->ipv6.fib6_walker_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &tb->tb6_lock &data->fib_event_queue_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &tb->tb6_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1599 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &tb->tb6_lock &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#84 rcu_read_lock &tb->tb6_lock &n->list_lock &c->lock irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1599 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &rcu_state.gp_wq irq_context: 0 (wq_completion)nfc2_nci_tx_wq#1599 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &rcu_state.gp_wq &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rcu_read_lock &rcu_state.gp_wq &p->pi_lock &rq->__lock &cfs_rq->removed.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &(&fn_net->fib_chain)->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &rq->__lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &rq->__lock &per_cpu_ptr(group->pcpu, cpu)->seq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 stack_depot_init_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex bpf_devs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex bpf_devs_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex bpf_devs_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex bpf_devs_lock rcu_read_lock rhashtable_bucket irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex pin_fs_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 rcu_read_lock rename_lock.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex 
&sb->s_type->i_mutex_key#3 &dentry->d_lock &wq irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 &s->s_inode_list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 tk_core.seq.seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 &sb->s_type->i_lock_key#7 &dentry->d_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &sb->s_type->i_mutex_key#3 &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &xa->xa_lock#4 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &base->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &base->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex (work_completion)(&(&devlink_port->type_warn_dw)->work) irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &devlink_port->type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex net_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex net_rwsem &list->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &tn->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &x->wait#9 
irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex gdp_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex gdp_mutex &k->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex gdp_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex gdp_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex gdp_mutex &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex gdp_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex gdp_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex gdp_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex gdp_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex gdp_mutex kobj_ns_type_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex lock kernfs_idr_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &root->kernfs_rwsem &root->kernfs_iattr_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex bus_type_sem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex sysfs_symlink_target_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &root->kernfs_rwsem irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex dpm_list_mtx irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex uevent_sock_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex uevent_sock_mutex fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex uevent_sock_mutex fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex uevent_sock_mutex &c->lock irq_context: 0 
sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex uevent_sock_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex uevent_sock_mutex &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex uevent_sock_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex subsys mutex#20 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex subsys mutex#20 &k->k_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &dir->lock#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex uevent_sock_mutex &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex uevent_sock_mutex &n->list_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex dev_hotplug_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex dev_hotplug_mutex &dev->power.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex dev_base_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex input_pool.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex rcu_read_lock &pool->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex rcu_read_lock &pool->lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex rcu_read_lock &pool->lock &p->pi_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex batched_entropy_u32.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &tbl->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex sysctl_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex nl_table_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex nl_table_wait.lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex failover_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex pcpu_alloc_mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex pcpu_alloc_mutex pcpu_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex 
&devlink->lock_key#84 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex proc_inum_ida.xa_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex proc_subdir_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &idev->mc_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &idev->mc_lock fs_reclaim irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &idev->mc_lock fs_reclaim mmu_notifier_invalidate_range_start irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &idev->mc_lock &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &idev->mc_lock &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &idev->mc_lock _xmit_ETHER irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &pnettable->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex smc_ib_devices.mutex irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &vn->sock_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex rcu_read_lock &ndev->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 remove_cache_srcu irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 remove_cache_srcu quarantine_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 remove_cache_srcu &c->lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 remove_cache_srcu &n->list_lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 remove_cache_srcu &obj_hash[i].lock irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &____s->seqcount#2 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 &____s->seqcount irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex rcu_node_0 irq_context: 0 sb_writers#7 &of->mutex kn->active#47 nsim_bus_dev_list_lock &dev->mutex &devlink->lock_key#84 rtnl_mutex &rq->__lock